diff --git a/.gitattributes b/.gitattributes
index 7f5ae2201d5e0641657f4c54c38b2b5ca693d672..e803f43a1ef46125f9e7269de0596f1918e0f728 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -336,3 +336,218 @@ Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sqlalchemy/cyextension
 Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sqlalchemy/cyextension/collections.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sqlalchemy/cyextension/processors.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sqlalchemy/cyextension/util.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_rcont/rcont.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_stats_py.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_unuran/unuran_wrapper.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distribution_infrastructure.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_morestats.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_multivariate.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ellip_harm_2.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/cython_special.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs_cxx.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_special_ufuncs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_gufuncs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_test_internal.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_specfun.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/__pycache__/_basic.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/integrate/__pycache__/_lebedev.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_ansari_swilk_statistics.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_biasedurn.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_sobol.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats_pythran.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_qmc_cy.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/constants/__pycache__/_codata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_ppoly.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dfitpack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_bspl.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dierckx.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rbfinterp_pythran.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_interpnd.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rgi_cython.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/internals.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/lib.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/missing.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/ops.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/interval.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/parsers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sas.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/join.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/reshape.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/testing.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sparse.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslib.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/index.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/writers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashing.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/arrays.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/fields.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/groupby.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashtable.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/dtypes.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/algos.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/np_datetime.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/nattype.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/parsing.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/strptime.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/period.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timestamps.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/conversion.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/tzconversion.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timezones.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/vectorized.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/indexers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/aggregations.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/strings/__pycache__/accessor.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/base.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/multi.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/generic.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/frame.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/stata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/pytables.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/_umath_linalg.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/__pycache__/_linalg.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_bounded_integers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_mt19937.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/bit_generator.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_common.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_philox.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_pcg64.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/mtrand.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_generator.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/__pycache__/_function_base_impl.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/fft/_pocketfft_umath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/ma/__pycache__/core.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_tests.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_simd.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_umath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/lib/libnpymath.a filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/fromnumeric.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/_add_newdocs.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/logic/__pycache__/boolalg.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/matrices/__pycache__/matrixbase.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polytools.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/printing/__pycache__/latex.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/numbers.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/expr.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/function.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solvers.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solveset.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/single.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3 filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3.10 filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/yaml/_yaml.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_tests.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_umath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/_core/_simd.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/fft/_pocketfft_umath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/_core/lib/libnpymath.a filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/fromnumeric.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/_add_newdocs.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/lib/__pycache__/_function_base_impl.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/bit_generator.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/_bounded_integers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/_common.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/_mt19937.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/mtrand.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/_philox.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/_pcg64.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/random/_generator.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/ma/__pycache__/core.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/linalg/_umath_linalg.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/numpy/linalg/__pycache__/_linalg.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/functorch/_C.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/svm/_libsvm.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/svm/_libsvm_sparse.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/_cyutility.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/_isotonic.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/tree/_criterion.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/tree/_partitioner.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/tree/_splitter.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/tree/_utils.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/tree/_tree.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_dist_metrics.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/cluster/_expected_mutual_info_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/__pycache__/_classification.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_argkmin.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_argkmin_classmode.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_base.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_datasets_pair.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_middle_term_computer.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors_classmode.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/svm/_liblinear.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/neighbors/_ball_tree.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/neighbors/_quad_tree.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/neighbors/_kd_tree.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/ensemble/_gradient_boosting.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/ensemble/_hist_gradient_boosting/_predictor.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/ensemble/_hist_gradient_boosting/histogram.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/ensemble/_hist_gradient_boosting/splitting.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/linear_model/_cd_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/linear_model/_sgd_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/linear_model/_sag_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/_cython_blas.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/_fast_dict.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/_isfinite.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/_seq_dataset.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/_random.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/_typedefs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/_vector_sentinel.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/arrayfuncs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/murmurhash.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/utils/sparsefuncs_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/preprocessing/_csr_polynomial_expansion.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/preprocessing/_target_encoder_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/preprocessing/__pycache__/_data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/decomposition/_cdnmf_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/decomposition/_online_lda_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_k_means_elkan.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_k_means_common.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_hierarchical_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_k_means_lloyd.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_k_means_minibatch.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_hdbscan/_tree.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_hdbscan/_reachability.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/cluster/_hdbscan/_linkage.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/sklearn/manifold/_barnes_hut_tsne.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d23fc161d8e679a98a3439a53bbe55cb523f1a86
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b90105aef1f646d7dc3521607e89df9c9df6a91395f2a22ac4e044fc2ca1cc01
+size 122581
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..43fae77180cadfe54e012b44dd9146102255d273
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a98a5a999a98a80b5fc983c5d1e7c3b908bb7bb2fd63e958f798f337f318844c
+size 3345225
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..cff504c62a7c39ce8f05c01e8f698fb3c4bead89
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d33a3a95cd002e3e842fcb3fc746c32769c0d872dd291d8b788b8f0673faf7a
+size 141369
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..617865939a69fcddd9ec9d70ab60727fceb862c4
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:edbd7b4ebdb256658faee76146f44bba3b01123be80c423ed812be531446b45e
+size 298297
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..1a892b72beb66dc64ac3815ab474431f0e46870c
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7335e66653fa0d71d83cfcc1ecd787356d9aa026046cf03f8bcda31b1c33e766
+size 161896
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..8efd57a196b4286955d185865420c044326b16c6
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7a739b2a056e66c89e4224fe2917769336e2e50f6bd19fe664ea9b8a94de16b8
+size 1023144
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..3a5f05ec4b742ba5195833ec94f0169041da6988
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2dc3aedef36c324c45160a2a088e6c077306d7a2705d8ae6bf3eecf5bc7ede2
+size 1401456
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..5bbdaab42969cc1851bf847e167fe1f8af772602
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecbf3f9d30a6ff1ff9842bb25f0819069fa7eebddb261db218a384a25e29765e
+size 4719832
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..96f62153a778fed954a28a6ff0673d9ae2af212f
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:684048415ffc4446394e1ac6cbefdee334b27ddc674a8b7f181e624e8cbcf871
+size 916304
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..478545442cd4ecdc7d98c64cc34daedd149bcb2a
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eed28ce918cfc93e2528528b9743bcf4c0dac124eeb0f5757115a28b5665f66a
+size 1120528
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..91709e2edcde1e7283ae84bbfea3f7a9939ed732
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3597ceeae0d01a864b3346b2befc11eef926b8ae0b0fd2a29e13b87dc64940cf
+size 1564752
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..ba4dc35bfd24ff07fe43db789da1718aa23004fe
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03d52e27ca281b4c0d797a08d7b48de89d17814514ef31a3071e0ac92dd0e30d
+size 6666144
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..915242b5e3dbf86bf66cb727401499c97c2af5bb
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe460a9e125959e1b72c10c2aa312aac4073e22ea90aaa7cc594f9b12faa1d08
+size 122042
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/_add_newdocs.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/_add_newdocs.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0872e714b1c67c8c21d681830e94c8aa87820aa7
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/_add_newdocs.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91ec61dd1e901728ac3cefa8f66bd47f9350e272788045a4051c1b0b219c61b2
+size 192941
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/fromnumeric.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/fromnumeric.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4d10873485ef27a64d5c4973ade12b20bdc115e8
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/__pycache__/fromnumeric.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07a0310b74f3397bfc69237d6abf76417f426367d76970df26067c40e1053afd
+size 139723
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_tests.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_tests.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..cd20fba72b009f42de97694ae93634a920e11464
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_tests.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ccbdf116b63f72150c1b1b414221069a4eb500837f735441fc2edce6bf004aa
+size 178888
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_umath.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_umath.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..3004735bbdd898e3ad2952fc2dd0c17548ea4008
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_multiarray_umath.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c634eaf05367af02b815cf3d2020fbeee9b47426af1fe26cf3189e4798932dc5
+size 10449209
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_simd.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_simd.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..c89abe23c3385fbcdf2c0a1df30869cee3502a80
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/_simd.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:49494075a89ff8971bfd46a3302d2296a0a72a822b6851941d5df6617871026f
+size 3038216
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/lib/libnpymath.a b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/lib/libnpymath.a
new file mode 100644
index 0000000000000000000000000000000000000000..c105ea0a53bdece5ed9f0b6239b5c2d62685b45b
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/lib/libnpymath.a
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:460de00974f1a67cb61e1fa36452a1e8a0d8aaece3709925ba61272c78505d0d
+size 118712
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/tests/data/astype_copy.pkl b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/tests/data/astype_copy.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..45694ae001c4a103365ff9fd5ae2da0dba3c11f6
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/_core/tests/data/astype_copy.pkl
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9564b309cbf3441ff0a6e4468fddaca46230fab34f15c77d87025a455bdf59d9
+size 716
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/fft/_pocketfft_umath.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/fft/_pocketfft_umath.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..59fa799790586e48ce688edbc38a71ecb32959b8
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/fft/_pocketfft_umath.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bdf266bd798225e5a1fedc24f792b4b45d52bd960adc94fd3cb76108b253d42b
+size 649272
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/__pycache__/_function_base_impl.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/__pycache__/_function_base_impl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bacfa67eab0385c8d085f721af19a27004e0e410
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/__pycache__/_function_base_impl.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fab1f971bf9b0bfbdd87e366c92b40944e3f3ee35f610818c42733e3f48c1d37
+size 167724
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-np0-objarr.npy b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-np0-objarr.npy
new file mode 100644
index 0000000000000000000000000000000000000000..14bdd93287e55e44dccd4d2fe3c0a427f2ce771c
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-np0-objarr.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:64ba08ecade24295c392ea03175626c9ce896f0e099e06d0282f60adab9546c9
+size 258
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-objarr.npy b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-objarr.npy
new file mode 100644
index 0000000000000000000000000000000000000000..0d41ae5223b7235d16081318703875ef495bbfc7
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-objarr.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:178732502fbf4c1f504852c0a3673b738e2b0ba35460882b7c6c7d3ea58f48a9
+size 258
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-objarr.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-objarr.npz
new file mode 100644
index 0000000000000000000000000000000000000000..85da7be0c3d77ff8d71ab7671e0d23188b7b36c4
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py2-objarr.npz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c68d771c14f415b159daabd9cf42d61836f74ae40049269787baca7d57098f1e
+size 366
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py3-objarr.npy b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py3-objarr.npy
new file mode 100644
index 0000000000000000000000000000000000000000..31847dc50272bcc123ec8bda0526c6e66d522923
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py3-objarr.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ee6b6290a9475e9e2e9d984cf1e068b7c0a49e5c750687c975def7369d8f5bca
+size 325
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py3-objarr.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py3-objarr.npz
new file mode 100644
index 0000000000000000000000000000000000000000..0c4cf0e129c7293076e9e7ccbf0d189996d5a27f
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/py3-objarr.npz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd5465f7f359effabe86376e52bf185a7cd1cbc1123659af30f95a4920baf5f9
+size 453
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/python3.npy b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/python3.npy
new file mode 100644
index 0000000000000000000000000000000000000000..1de3124ee4eaf42cfc3625cdfa88767de07df9c4
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/python3.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f469dde101a2c65e283d2ed487028f8180ebcb9457cf60c9d9b952314668fed
+size 96
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/win64python2.npy b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/win64python2.npy
new file mode 100644
index 0000000000000000000000000000000000000000..46edfc4d6869ed05b8438bc5bfa5d77d9ec3e9ba
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/lib/tests/data/win64python2.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a039c807558149ad5fa7ad12436c69d49c5e194cf617b92785f8cb60ec63297
+size 96
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/__pycache__/_linalg.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/__pycache__/_linalg.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..571d3322f568b35a4999da1f3440df2d83b6dfbb
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/__pycache__/_linalg.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ae84ec12381416db70fe30cf7a8f1d6452d9c9e2ce60d939bec73ba10833a839
+size 107207
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/_umath_linalg.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/_umath_linalg.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..ec189fbd885a51f32c3a8b495a1d8566c54dbe36
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/linalg/_umath_linalg.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6bc9d5c4db6cea333d46d905e15fbcd8f541ec5b23704ca3c44547ee2ee5187
+size 227657
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/ma/__pycache__/core.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/ma/__pycache__/core.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..701c4a9ce4413a5684286ab5879559454554274a
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/ma/__pycache__/core.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5896ab58857d836c6d33779c82b3fc2c9484f818260aa789ee2a5a3587568ed6
+size 234081
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_bounded_integers.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_bounded_integers.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..98ac82392b1ec1ac48cb760c4de2c92e89fc483e
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_bounded_integers.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b4ff98f2bb0f7db24091b7996dd4d5c6ba5ae584f3683fcfa72ad8bce926ef1
+size 362560
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_common.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_common.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..79aa4b6c20c7584869791de7ac092ea3d2fa9d0e
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_common.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:209653a11ba1914f706e972fd071be37c3207e182918c81fc7fab43cd66c5c28
+size 251664
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_generator.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_generator.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..20c6df41d36c781659fba215561c28c358db1165
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_generator.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b71b3b7481e842a207aa2b06ac7547721ed56366c5bccb7943616591e6075fb9
+size 996144
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_mt19937.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_mt19937.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..a883f37a51b6356f5cd065a6799e984e1bfd4f1c
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_mt19937.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6245585dae2024446b9cc6f86f8f5d7ffb90054f2c50be0ede4b05663392f527
+size 129912
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_pcg64.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_pcg64.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..24800bfb6e03ebd0582eda5c650499f1fee163ad
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_pcg64.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:839bd03aa79e83d3aa67733423ca65fd47e4af535145c0f7384d7c88642d3ef8
+size 136480
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_philox.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_philox.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..9d312ddefd4d6b331b95b75353a48f32402ea619
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/_philox.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b166c0672edb46c1c9cbf720867d95aea2e9a323bc069157c9f0c15884ea5882
+size 116832
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/bit_generator.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/bit_generator.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..2856b1ac335d919dfe83131635f6f94ea616163d
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/bit_generator.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa02602384f668d5551761a14c616893a01ac02965092cdaf52b86fd2584d9c3
+size 242528
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/mtrand.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/mtrand.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..bb5b6d958527d0d58bab1cd215f5643631d9566f
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/mtrand.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ef34372d188edcd794777a72a2eb86010f17736d3d7195b4bd6bf848ea7906c
+size 797808
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/generator_pcg64_np121.pkl.gz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/generator_pcg64_np121.pkl.gz
new file mode 100644
index 0000000000000000000000000000000000000000..c553433249c7ff23400c61a294f52591da508c5e
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/generator_pcg64_np121.pkl.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11f43e5fbd0a9078010055f6a483dc09497830d3f564d44e697394ef9bdd8aa3
+size 203
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/generator_pcg64_np126.pkl.gz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/generator_pcg64_np126.pkl.gz
new file mode 100644
index 0000000000000000000000000000000000000000..c097e45919299171caefeff4a800cafad9f903e4
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/generator_pcg64_np126.pkl.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7cdf1d78d5715fe1c42c0d5e219df691db58bdce21c0ac9aeb0bf4d0625e1f46
+size 208
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/sfc64_np126.pkl.gz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/sfc64_np126.pkl.gz
new file mode 100644
index 0000000000000000000000000000000000000000..b9f88985ca2c492d4c96adab79151d824dfc6cad
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/numpy/random/tests/data/sfc64_np126.pkl.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3156b5ca5172ec350f81404afa821e292755978518122377019ec6dec773cdac
+size 290
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/algos.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/algos.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..e36b7ca16e87d18816e00887cbe7c96ae14d42ca
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/algos.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:60df1b8f4dd7475bb7f52184d344a602211b97befdfeefc4a3c580962ceea6e9
+size 2061120
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/arrays.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/arrays.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..5b51d47360dbbc8b3721df5f44c89f86558ac457
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/arrays.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f2c62feda631484792855ea9102103394ea040d3fa99309d8ae16e6bdf8f410
+size 125056
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/groupby.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/groupby.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..2ca1e5dc374766cafa951a42f77a48c150af653c
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/groupby.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9be822a671428f098a68c02db95670d6cae902d1f784436e0c3693a7605a3749
+size 2387920
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashing.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashing.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..22a04b433a475bfb46e47de5dc241b97fb81712f
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashing.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bef942d4801f88dae2f07bdded68ae7ae8178387a63d9d114469bee7f35d1bd4
+size 204640
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashtable.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashtable.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..6b2f324b336e6b0467625f84e78353f9276155d1
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/hashtable.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18b788bd7c5412baa0d2cff075527d3198880b5de3278ca2e21c2683b7a56031
+size 2076960
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/index.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/index.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..fa2ad2a38c83dc3f130be9f6079eff6f123c8daa
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/index.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fb9854f57162a09bad83b967dfd42e1f958509d671c2d54f4f6725a8c4e4029
+size 877680
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/internals.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/internals.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..bba7689442ac3f25189cda03c6f46a8592e89c92
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/internals.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed6af1148db38bb530645c5239eadf0035cef132b96c12a59543a200081f5ef6
+size 390768
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/interval.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/interval.cpython-310-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..d368b60ae12dce032c93f96cfe2b03f47f8c4d8e
--- /dev/null
+++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/interval.cpython-310-x86_64-linux-gnu.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62be054d97900855b247f663ea07f91ccd4a3edf690a3c0240ba1ab86e241ab6
+size 1417760
diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/join.cpython-310-x86_64-linux-gnu.so
b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/join.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..90a70ac2af56c58b9ee434624d080dadfdf67597 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/join.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e81a5f4dd3eaaca34475dec52af92110ab04dabc06402996771676fbe5d68060 +size 1379776 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/lib.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/lib.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..bb6600b4c45d5ea0fb818cd161bfe012ca55c7bf --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/lib.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:004560223c802b18f91b57dfd69ecab02a4da18d0b6bdea16c7a67620f197230 +size 842176 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/missing.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/missing.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..2529e828d3fca4235c26b62eafe9a8bdecf03a93 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/missing.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cce763b839c03758200dc018054e35209dda656b775463c5f42c26d32d0fe3ba +size 190912 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/ops.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/ops.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..d3cf910000e4b43fc3f8afffcbcc0dc8f4c36a82 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/ops.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dbe3d889855a3ef72eca34fcd76195213eba00ac70d4921b06aa62f50e650b30 +size 249856 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/parsers.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/parsers.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..ed4d3525aafb7f4f907cef0dd61f2aba5b602b73 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/parsers.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fa42765434d6c2072537f7f5cc2342780bcd1c17e6d509100ac3592c18901f3 +size 557744 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/reshape.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/reshape.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..ee4ab2291f2fde3ef350f1ebc54b2f335971bd16 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/reshape.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b312fc59d28bc7121900c2eca422c5c0bb8e567c4836dd8726ae3159862440f7 +size 287904 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sas.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sas.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..9d7f410d25dc255534fab72ea8aadf12f74a6db6 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sas.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eba49c0371378321b2edd70e9ddb829c9848ebad09469a47783b19173b1ec89c +size 250304 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sparse.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sparse.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..03a258f15611f75707167e517dfd01203bc98dc3 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/sparse.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6831074f9663d8faa462bb19aa41b38577c4cab5e4fc8c6b1c8a5bd662fd5c5 +size 836704 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/testing.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/testing.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..fe31a71e054be7023c7a82c5e7ce583d4dff54ca --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/testing.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79e191e6cd8c38fb61adab7d4a200d81067cd8ac99bd9c8a09f232c01a8b1098 +size 115232 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslib.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslib.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f7882787fe06cf9f191f4ab94d408ad3c0d3b7ab --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslib.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39e75e9d5b71f40470cc5cb3cf6f9a2b10052b3e035d1a7c3e0a6b3580a5036a +size 324096 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/conversion.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/conversion.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..bf9e793e94816ca1d3e8cd49bf5e20a2ab96bfe4 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/conversion.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ec5964a12fc6a7f0e4b6f9461e26e65cc37cf68de686cf3d66fcc7cab941f63 +size 295744 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/dtypes.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/dtypes.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 
0000000000000000000000000000000000000000..01b16ca88f15a9cb430310afee2591e1d9902583 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/dtypes.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:05de15a545de28b58565fb5577ea5cb828e67bc2b85d35adcb9b8436ccfc15dc +size 190400 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/fields.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/fields.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..5a26e8246cb4143b1efb09008c04d41b4ba0f991 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/fields.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7388e93c2f22b2becbd475d63c2502e0b87258d4db0eda5a7fdac651cb3a2772 +size 324352 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/nattype.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/nattype.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..87ba11c5869e21ceda37e1e73e47d8afb013233f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/nattype.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ae185ef1ce759eb57286d3dddbba7747126945d5b4f26b9b73d98b1d6ecfbf0 +size 225088 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/np_datetime.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/np_datetime.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..dda4b8b08b174782aa59a1dc753dedb6a6d30007 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/np_datetime.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db52509bbdab5071fc51c352e23056777d02c8a1c3024a3f9325aeaa82c60f7c +size 135840 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..5296bd6f8104a69bec2fb8a6221a465bec96707f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0ed0f76a544f08bb93fc59ef20406529fb2e9d75c574f930d71322a77c99063 +size 1008048 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/parsing.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/parsing.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..975c8266121f551ba36326fda0049ca0cea1f673 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/parsing.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:a8d2e4e5e60ff4e8f836f9cf7cec70c4653e9bef8197fac33cc1c54d61326faa +size 419984 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/period.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/period.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f4b2edcfe67635d282ee426fdba2a14da836e66c --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/period.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7529be272ec84100a59ace59df7ae5cab1647517069ca65b3928bcd6fdafc013 +size 499200 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/strptime.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/strptime.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..1e2457d12bc965cc4567c880c95322ef2b1e1169 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/strptime.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b38020f86a8a61badd567e2ae4d5cfb3108001bf0b744c84be9bce9007a4f1a8 +size 373856 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..bbdf577e27c7719dfa6a9e4a330436a1a4f1cd60 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e23f2a1884ba57154fbba7ff33d5826397fc528f30a3955cd745161b18bd35f1 +size 586656 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timestamps.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timestamps.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..74e5c039d2f723c1c36c65415e44203d6e25e448 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timestamps.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a62ebe53c9ae60bd9eb8b25bb536d79ae63f9081e2d38a6638d0b6e4f95c7987 +size 615984 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timezones.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timezones.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..d92f116480475f352ae3e0214eac344f9e8fc3b9 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/timezones.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8caf0f5863a82d7408bce00ed5798069c87b5048cababc436551ccb7a25707a6 +size 274464 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/tzconversion.cpython-310-x86_64-linux-gnu.so 
b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/tzconversion.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..accb73afbcb4d03b1c801be8d6b814c426b7319b --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/tzconversion.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d46ac3eeb300d9379382bbd7b291ba2f725a874e85d27c6383d408beef915211 +size 312192 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/vectorized.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/vectorized.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..4210bce7031e19f8f16103ad0e7f22057713e661 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/tslibs/vectorized.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e308c793c3737ec19ca783f75620fafaaaa9e229926c820b9a8bfac45cb5cdde +size 237888 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/aggregations.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/aggregations.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..d3d8d09abebeda48eece134f98779165bca2df25 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/aggregations.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78bf029820541e2465c29981cd33d217a459428105e188d83585cb835388babc +size 390192 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/indexers.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/indexers.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..0297d26b8184c68c8e646aed91849853af5a5d66 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/window/indexers.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f643c559bfd06d658887b52729d490cf79cb175388ac91e25b0a117efab6f3c +size 200512 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/writers.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/writers.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..a5b2e5aaafa9e5d3a6736c349e3ba4330a6a39fd --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/_libs/writers.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5fb8dc3e4a19a760d28169da33e64c0215b05d4daf88469a1d6c4f2c0229e98 +size 238528 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/frame.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/frame.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9190d215cf56e8d6dc5642085bbdc6e4dd30fefd --- /dev/null +++ 
b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/frame.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fff1e3700903d9d57e146e8d6f2f69e2344857b79b986fc44561e8121e507383 +size 362954 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/generic.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/generic.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4a50954ed9d4f8ef373a308581bef38398d6b17 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/generic.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d391e3d09c542493156082f66f3ebadbc79ddd095aee71481c53685f313331b7 +size 386213 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc14822d4b28eb13b6b0997f9e4c6acc8313b238 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/__pycache__/series.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b74153fe1cb554d8b8c5d1900f92b6c9343dc20ec88ad268a6db6a7cff2ed0b4 +size 176374 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..83c2936cb6f074c28686be37250da2b3e5cf9297 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc2ced81cf16ada6767d90c536d1b9cabbc712570376418f52b11a6be315b7e5 +size 158761 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/base.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c444ce855986ef7b249ede85688b03c7690a855d --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/base.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ecc0ce7178c8088f88061e1b6eb844ea36c8bcbfb1ba6483ae53efa189c0021 +size 197113 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/multi.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/multi.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab89bbb77d6959b79f1a27b934886fd6f41ec4e3 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/indexes/__pycache__/multi.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ed9fa45f1829f49af4ca7e5117a617cb7e76d1c7db589c3155e5a2e3dd0fc31 +size 108999 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/strings/__pycache__/accessor.cpython-310.pyc 
b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/strings/__pycache__/accessor.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a3035bdf1e4bde9dad9d4d0704b5ce2109acec9f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/core/strings/__pycache__/accessor.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8395d05e212580a784af3b73f86fcf4a5a17082728009a13468216421c597ba +size 100959 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/pytables.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/pytables.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb37112ad81f6020eeb23b84eb32ca55cbd6da98 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/pytables.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:713170d0fab0e78c747457eef59d2c766e76946e6ab07d7579423c57261f1cf5 +size 139901 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/stata.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/stata.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..22e505d3e4fb443dfffde3e42d364dd37d16d31f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/pandas/io/__pycache__/stata.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2f22d5abadcf02ff7ed5389303c68ea16c397c0a642fc17ac65ed456129adeee +size 102764 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/constants/__pycache__/_codata.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/constants/__pycache__/_codata.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e0cf74bce7ad057d6d87100be00def1d6f627c2 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/constants/__pycache__/_codata.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c6825be719ce1e5dc4aefb5382ece2c9677fe2c78567926628497a5e9cf92c9 +size 198653 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/integrate/__pycache__/_lebedev.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/integrate/__pycache__/_lebedev.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe7bf7e31ef29508a693212f65b6126deb50008f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/integrate/__pycache__/_lebedev.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:75af2e49392ffbdb982cb2d78471c1154e508cc6c064640f97ca22a48c6935e8 +size 100215 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_bspl.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_bspl.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..dc384954c3f7335b3b8032850419269e67ce0730 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_bspl.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:77494c45d9a5437d3d4e1ddaf782ea955ff8dc12777a964f61ca0b08f7d62edf +size 406513 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dfitpack.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dfitpack.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f222d0ceb0ba07216b9ef19b542ab80c02a589d2 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dfitpack.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55ae261f3d27c23f34f9d4c1328562bf5f4119a0f62d9aad40b37db02800ea92 +size 346377 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dierckx.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dierckx.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f43ba920a9a2d9b3896ed75839c15d554794f1f8 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_dierckx.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b62564d7141487c29a302e4fece250baef2af6fb459cd113e24db877eb3c18e4 +size 141921 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_interpnd.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_interpnd.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..7d2d5053f7ca307b84cf7e3989ceb3cff12ed45a --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_interpnd.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:007c7273f4f4d627c319bd19a7be1e7c291bf3de21e62e9ea14cb159d8dda003 +size 458840 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_ppoly.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_ppoly.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..da8f64d1292721f14a2dfe786fc828c2c0ced69f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_ppoly.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b21bae3804b4066b4d1e50f4876126c01d69e72486689a726c4122476b774e33 +size 466256 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rbfinterp_pythran.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rbfinterp_pythran.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..1a306a124b118dc08389ea0430c9c64f0a072077 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rbfinterp_pythran.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ca1148f26f0886dbdc5308afc31f110f0917c4ebef6e781fae5649f07ad78e5 +size 256728 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rgi_cython.cpython-310-x86_64-linux-gnu.so 
b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rgi_cython.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..a218c1dc91d875136ec3235b95103d784e1e80de --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/_rgi_cython.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c15d6c7c229e4a0a8548d47fe9f1483992b9596cddd5368004625ba048d6e152 +size 295704 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/bug-1310.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/bug-1310.npz new file mode 100644 index 0000000000000000000000000000000000000000..8bddf805c36b29dc449556c27a2b489691f841af --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/bug-1310.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d6803c0b398f2704c236f1d1b9e8e5ede06bd165a0abb0f228281abbd455ae9 +size 2648 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/estimate_gradients_hang.npy b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/estimate_gradients_hang.npy new file mode 100644 index 0000000000000000000000000000000000000000..c5ef8f63f263a476823ddeacf2571551c2fe4690 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/estimate_gradients_hang.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:406c10857417ff5ea98d8cd28945c9d0e4f5c24f92a48ad0e8fab955bf2477f1 +size 35680 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/gcvspl.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/gcvspl.npz new file mode 100644 index 0000000000000000000000000000000000000000..50e9348dcca79eae861e67092add93cdb8ff1ca3 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/interpolate/tests/data/gcvspl.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03ce8155a6cba0c1bf0a2441a10c228191f916dec36cb820723429811296bba8 +size 3138 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_15_data.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_15_data.npz new file mode 100644 index 0000000000000000000000000000000000000000..660bbb41b7fad43ed945dc701693451ceb60166c --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_15_data.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:13f3e1491a876bbf59d7ea10ad29c1f9b5996a2ab99216f31d5bfcd659012c1e +size 34462 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_18_data.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_18_data.npz new file mode 100644 index 0000000000000000000000000000000000000000..0b3d569a1a65e9b5ff153ae4121a6a5a69409f7c --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_18_data.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59f839467f2752b7df6fb6d4094396edd32a5929b764f7ffa1e6666431e6cac6 +size 161487 diff --git 
a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_19_data.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_19_data.npz new file mode 100644 index 0000000000000000000000000000000000000000..90168ad4e888fba29a772ee13798ec126016140e --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_19_data.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38e8fc7b041df0b23d7e5ca15ead1a065e6467611ef9a848cc7db93f80adfd87 +size 34050 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_20_data.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_20_data.npz new file mode 100644 index 0000000000000000000000000000000000000000..87266deb46238307347362b63a4878f2565baf56 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_20_data.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14e222d34a7118c7284a1675c6feceee77b84df951a5c6ba2a5ee9ff3054fa1d +size 31231 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_6_data.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_6_data.npz new file mode 100644 index 0000000000000000000000000000000000000000..35d1681786c95602c4f0d5260fc5ad0ff4236189 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/carex_6_data.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b2a0736b541ebf5c4b9b4c00d6dab281e73c9fb9913c6e2581a781b37b602f9 +size 15878 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/gendare_20170120_data.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/gendare_20170120_data.npz new file mode 100644 index 0000000000000000000000000000000000000000..ff967f2ca0d0868aacf7d7e67402599e64bab817 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/linalg/tests/data/gendare_20170120_data.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a3dfab451d9d5c20243e0ed85cd8b6c9657669fb9a0f83b5be165585783d55b5 +size 2164 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/__pycache__/_basic.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/__pycache__/_basic.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..533b4d7fb5d6eb8b8eb98194171b7e002ac0dd19 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/__pycache__/_basic.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:97cec0fbd66795f59ecf8dd875b7482c1034616f6e6f5e6b601d2ea5a152c7a9 +size 103156 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ellip_harm_2.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ellip_harm_2.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..2e63a8f9c69e79b7253dfd3f65de79b9414b1b0c --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ellip_harm_2.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:d6c59efae8add1fbaa406a1007eca03df58ac39e657b175d7cb82682ce37369f +size 138273 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_gufuncs.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_gufuncs.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..e33115b48656237ef1640a0ab00febfaca270d0f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_gufuncs.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b0dcb85c17e1f35a460118de33ee035ed288899f2a982f9ec6d5a9c96525fd40 +size 753248 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_specfun.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_specfun.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..99ed6639cebccccea89e5b54cc62aea2c504b95d --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_specfun.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5dd4491c6c8e8aba2910a779ed53a66b52e3dcd527307833b8e07c7cfff6371b +size 296576 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_special_ufuncs.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_special_ufuncs.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..8e1b36fa40bb20e5c9b4e597b4dedc9039b080f0 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_special_ufuncs.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d45be6bf592c88a6cd24910bb2ea3f254bbdac51e9edcfd1a3e3a43370d8b3c5 +size 1464544 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_test_internal.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_test_internal.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..8a44a0560cbd5236cebe656fe01b7f95c95c9c4e --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_test_internal.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e79981d76d0d8a96f938c497a0ec5cb79dba27878bef9a21edc6ec0d19d20f7 +size 259384 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f321d20aa43606b0e0f9e2bbf746a568cd5fec32 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1efe233bd4d377b622ad09f65035bf89e2dfcbec5a0b0444c2e235c5773acd7 +size 1626249 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs_cxx.cpython-310-x86_64-linux-gnu.so 
b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs_cxx.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..3c4f41246d86f6188b6158f4d50ccfdad3cf9691 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/_ufuncs_cxx.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db8e1becc2f6b5fb6395c5201e9658833405ca3e7da13451f59e024d953ba869 +size 1855408 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/cython_special.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/cython_special.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..3311bd93c38aec62dad8c1c88493e787e1976345 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/cython_special.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:98c15a13918f4ab1918da480d67567028dc6d3c7da74e23cafdec706e54c9c57 +size 3516056 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/boost.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/boost.npz new file mode 100644 index 0000000000000000000000000000000000000000..a3cba7656ee5445c3c94b8695f526de05973cadf --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/boost.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d73ecbbb51654522342ba0470a6263a9684e617c2b8374565fe3a79593f4b231 +size 1270643 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/gsl.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/gsl.npz new file mode 100644 index 0000000000000000000000000000000000000000..b090dae17b5b0403f7c4919c46a464a09509aeab --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/gsl.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:acab700208cbb301ee51eb1f512cb1c27e7b4e7533fc5a5f5cd5c5d6aa197dd8 +size 51433 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/local.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/local.npz new file mode 100644 index 0000000000000000000000000000000000000000..7a1d159f5fa6dc3c5521bda8cf3049ee24945857 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/special/tests/data/local.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:102b876c27ec4d2f8041d5ab2fb6dfefc8147021335160f515455e53e06871ff +size 203438 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dcaded3134a098a690adb5e4cfe45cbd5b314979 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_continuous_distns.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68bdb72722c66fffc143f4de68bc0de3594c1dcec2ae6dea4c4c87359e4cc75a +size 387036 diff --git 
a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..69b4ec4252c8e5ec720f2694c53f2a07a8a0e869 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distn_infrastructure.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2a66bae3a86ac7ef16acf837883d56c3ab348de3c70a5d7aa9314a0bf620b7c +size 125859 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distribution_infrastructure.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distribution_infrastructure.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d5239af41b3a7ff0c7eaf48fcbede5f44348b5e --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_distribution_infrastructure.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c30dca3c4b67d304f955fdc1951ceed32e1db012711aecc6eac3d950bf8a42dc +size 154130 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_morestats.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_morestats.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b45648ceabf2a2b6e468cc2cb4cd93fd7503c3c3 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_morestats.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f7381b61e9e0344463f2161a28a19f1a93cc1edbef0bfef9ee0e980d5b1889bd +size 153736 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ea8e5b80e44b3be77270e217dc69ef45d2eb628 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:066842ee96321f8e2df5de09f568d9fd9b9427541e712f15e2dedc7366bf0c6d +size 111439 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_multivariate.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_multivariate.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6568098ff76641988b91b82c416fc13cc95aba28 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_multivariate.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ced30879480c11d600d99aee9d35ecc25515dcf8faeb28bce2cd9418e29a6b78 +size 224591 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_stats_py.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_stats_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7252d3d1938da97de2d4af4de293fb0d9149f10 --- 
/dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/__pycache__/_stats_py.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dd58b47b5230e1aa22796e8e8f9128a5227c1790a0c6df183e1f6b92bfc9c6f1 +size 358580 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_ansari_swilk_statistics.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_ansari_swilk_statistics.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..3f03dc2cd1594327e9d825b8f500eaf9d56255bf --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_ansari_swilk_statistics.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3cba1a5447ccb54e94333676e6b494123f78c85ca8bebe7afb476044281dea5b +size 278232 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_biasedurn.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_biasedurn.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..7334887d774882ffaf34f9a9f66f98135a7f417f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_biasedurn.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:580af86d75988e8c3fa2caab4352304ca80a276da4145886c6547a7dd929a910 +size 323168 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_qmc_cy.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_qmc_cy.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..e12fddfcc060c4e6e3f52403640db4ae54320914 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_qmc_cy.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4f629448edfbfb03307f3fc3617c5d84a1f8e8b980b50d959d53988471683d30 +size 291104 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_rcont/rcont.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_rcont/rcont.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..206253142adbc4f6b44bde5e63d5ba2e072f3385 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_rcont/rcont.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce2eae10f97cf87e7923be222c727d15e11900e2035abc06ff4d3278705442f7 +size 263128 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_sobol.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_sobol.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..7b9f3565965a62100149fbc9baffc04ad04f70e1 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_sobol.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e8fda1d4508eb82ec491dac4472dfb071487d3f9666d544844833eb57d663e5 +size 404048 diff --git 
a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_sobol_direction_numbers.npz b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_sobol_direction_numbers.npz new file mode 100644 index 0000000000000000000000000000000000000000..44f1f1e9ebd1eb188289ca9adb8027855c1a23b6 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_sobol_direction_numbers.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4859931147d42ce465b8605cb277f957d98b839d03194fdf06579357906d193b +size 589334 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..fa3160c648b8a9f57bdb701af0c6945c1e94d32f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:470c2ee5d1e7219f908193d03693f642fae4c01d33f3c7e1c11491ba093d98c8 +size 766544 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats_pythran.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats_pythran.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..182a480920df74aeb584c98e47761c47674c8954 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_stats_pythran.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:433df3699d3ca5610697eeea3d2fcbadd6b70ccc6f2309e65bffc9e33b97aa56 +size 182128 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_unuran/unuran_wrapper.cpython-310-x86_64-linux-gnu.so b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_unuran/unuran_wrapper.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..9b0c42399e9ab626c250bef0e5b2434515aebea7 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/scipy/stats/_unuran/unuran_wrapper.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:01b20c23e9ca7c868b86b6bd109e153f4e21bd6aeec88522ef93976c6f4c2e7d +size 1589832 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/expr.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/expr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d403eb5cd279a7f94f1229d6a5b6feb18e32882 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/expr.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9851ee0c9c4e182f6891406240a38ddf7d32152a8b367f7512646e114881fc35 +size 116997 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/function.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/function.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b40a0a796883e88d98c922598844ab8b3e87113 --- /dev/null +++ 
b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/function.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:22659f202e06b685108942b5886915aad83d63b451a07cc418e9f2a7bab95016 +size 101961 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/numbers.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/numbers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..63bdbc0a31113ae76e0eed77ed4e72177ef9209c --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/core/__pycache__/numbers.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3480308e7f8ab21aab927319d4deeb0f932aa0e3b88706322c8b1a1244ebc927 +size 121335 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/logic/__pycache__/boolalg.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/logic/__pycache__/boolalg.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c53a2fe203b066327b6b2b3ee8129b40fa395066 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/logic/__pycache__/boolalg.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03a7d79a3ec22b8571ea98bb03dc65275e01f884c4f55bf6ac908a62adbf4bfb +size 102763 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/matrices/__pycache__/matrixbase.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/matrices/__pycache__/matrixbase.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51403c8d277b80291f27df9924d9523ddb28c854 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/matrices/__pycache__/matrixbase.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e3964e6f9e1377a1c58b1d2541bc2e822a19fd9f277b0945c2aa7dd3beb3419a +size 165500 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_and.png b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_and.png new file mode 100644 index 0000000000000000000000000000000000000000..07cac5b54f8a39774c151fc70a00552ba83fe5fc --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_and.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:115d0b9b81ed40f93fe9e216b4f6384cf71093e3bbb64a5d648b8b9858c645a0 +size 6864 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_not.png b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_not.png new file mode 100644 index 0000000000000000000000000000000000000000..51c241e9825c28047cdd31fd64020ecb956a19e4 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_not.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dceffdfe73d6d78f453142c4713e51a88dbe9361f79c710b6df2400edd9c3bc9 +size 7939 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_or.png b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_or.png new file mode 100644 index 
0000000000000000000000000000000000000000..7f9cc7cf23bec219b8d6101c4cbae235a2c678d1 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_or.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e41ba0d3dbf2a20f82bb79a4cbba5bb458dec396ccbdba5ed195d6b200ca7f2e +size 8809 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_xor.png b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_xor.png new file mode 100644 index 0000000000000000000000000000000000000000..cafdc56f650a8c4d7af38fdfd8206891aa9d6cc2 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/plotting/tests/test_region_xor.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92e71558103d03df0ea5c47876277968b5d4ca8ab8cf43b80b73cce9d962052c +size 10002 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a128c9fc0cb1d107ea5b96b79643281971e0a302 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polyquinticconst.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:25cf08f4d6f846ce72801825b348d884f22eaf1b983ef878e97c356ad9fde5ab +size 132169 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polytools.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polytools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a77f3c955aadf55dbe9892f455a58450c57c2cf4 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/__pycache__/polytools.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:01a0353a97dd83e140abab00463a5549824d9ea42fedac9d36f49d1bd747179c +size 193446 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fad28bc4fc0a1ee0a740ed7d63cd908c14fc92a0 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:280e1fe4d1f26ee726579a0f6d2cb6444ecc17fbbb948eacb74e88fa8c8882b4 +size 114887 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/printing/__pycache__/latex.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/printing/__pycache__/latex.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d995c0c76c82e33f096e507321ed45dee0ab1ab0 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/printing/__pycache__/latex.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91262a550b60b33cc07d3a3bd547bc06768089be4e477446cd47a030fcb004a0 +size 119501 diff --git 
a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solvers.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solvers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf3b53b3870a8a461d9a01a6b2da3a35bbbe9c6f --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solvers.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b0f9f0857c97a1189ce0fab7d76f402238c380d8a9995f85dccd7bae1d3bd1f2 +size 100944 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solveset.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solveset.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb5750732f520ff79fcec638d363b01a54ffeb79 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/__pycache__/solveset.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f18769b58dc15220291e4e2493f2c2cce2ccd06ca9ca7f5d5262998bb899b25f +size 111953 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c25b21e23874a2fe73fd97fe8e885805479f765 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/diophantine/__pycache__/diophantine.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a49be5912745a406237e0341d87a64bd0c85f8a3fe5d20ab0e42570c420fcab +size 107101 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c3aa9bb4d26f4f1d998d87525dd31c39fc9b8bc9 --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/ode.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ec7bb1e55d3c932c56fd2f41c5f130e98fb35304cbb640037278d19aae5ee52 +size 121461 diff --git a/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/single.cpython-310.pyc b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/single.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f27f814bbc17f5b9bf711b59fd7f9a0aa146a8e --- /dev/null +++ b/Scripts_Climate_to_LAI/.venv/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/single.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ccff6b3e36dd7bb26f03e1c613655991581d74956349948da6703ab928524c67 +size 105152 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python new file mode 100644 index 0000000000000000000000000000000000000000..70f41baa9a6e156c35b7f0c5d15a5bea8ce5e22f --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:d6bca2b84e73c7775a0dd5e6a76899cfe4ee62863d7c8f88513811d1fda23f49 +size 5937704 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3 b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3 new file mode 100644 index 0000000000000000000000000000000000000000..70f41baa9a6e156c35b7f0c5d15a5bea8ce5e22f --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6bca2b84e73c7775a0dd5e6a76899cfe4ee62863d7c8f88513811d1fda23f49 +size 5937704 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3.10 b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3.10 new file mode 100644 index 0000000000000000000000000000000000000000..70f41baa9a6e156c35b7f0c5d15a5bea8ce5e22f --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/bin/python3.10 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6bca2b84e73c7775a0dd5e6a76899cfe4ee62863d7c8f88513811d1fda23f49 +size 5937704 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7d3c816af16cfc0592bbff2a83e28259614f4e8 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/__pycache__/Image.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a159c16a0ba75ff6ed45ab1f8884c3c819063601d517f003fec4948a19dc67d5 +size 122564 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..43fae77180cadfe54e012b44dd9146102255d273 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imaging.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a98a5a999a98a80b5fc983c5d1e7c3b908bb7bb2fd63e958f798f337f318844c +size 3345225 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..cff504c62a7c39ce8f05c01e8f698fb3c4bead89 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingcms.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d33a3a95cd002e3e842fcb3fc746c32769c0d872dd291d8b788b8f0673faf7a +size 141369 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..617865939a69fcddd9ec9d70ab60727fceb862c4 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingft.cpython-310-x86_64-linux-gnu.so 
@@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:edbd7b4ebdb256658faee76146f44bba3b01123be80c423ed812be531446b45e +size 298297 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..1a892b72beb66dc64ac3815ab474431f0e46870c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7335e66653fa0d71d83cfcc1ecd787356d9aa026046cf03f8bcda31b1c33e766 +size 161896 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/INSTALLER b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/LICENSE b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..2f1b8e15e5627d92f0521605c9870bc8e5505cb4 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2017-2021 Ingy döt Net +Copyright (c) 2006-2016 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/METADATA b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..db029b770cd87a12086e70b1be9900c93d255f0b --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/METADATA @@ -0,0 +1,46 @@ +Metadata-Version: 2.1 +Name: PyYAML +Version: 6.0.2 +Summary: YAML parser and emitter for Python +Home-page: https://pyyaml.org/ +Download-URL: https://pypi.org/project/PyYAML/ +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues +Project-URL: CI, https://github.com/yaml/pyyaml/actions +Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation +Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core +Project-URL: Source Code, https://github.com/yaml/pyyaml +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +Requires-Python: >=3.8 +License-File: LICENSE + +YAML is a data serialization format designed for human readability +and interaction with scripting languages. PyYAML is a YAML parser +and emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that +allow to represent an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistence. 
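The METADATA description above is PyYAML's own summary of its API. For orientation, a minimal round-trip with the vendored PyYAML 6.0.2 uses only the `safe_*` entry points; the mapping keys below are invented purely for illustration:

```python
import yaml

# safe_dump/safe_load restrict construction to plain Python types,
# avoiding the arbitrary-object instantiation the full loader allows.
text = yaml.safe_dump({"site": "field_A", "years": [2020, 2021]})
data = yaml.safe_load(text)
assert data["years"] == [2020, 2021]
```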
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/RECORD b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ddbad690867ccdd82059a1b28cad7fe8c9393843 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/RECORD @@ -0,0 +1,26 @@ +PyYAML-6.0.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 +PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060 +PyYAML-6.0.2.dist-info/RECORD,, +PyYAML-6.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +PyYAML-6.0.2.dist-info/WHEEL,sha256=baMMpUvyD0gnRdCe6fvqCg8rft4FNTdLqZQ01WfKJmc,152 +PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 +_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 +yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311 +yaml/_yaml.cpython-310-x86_64-linux-gnu.so,sha256=20HV-cVpIFuOuVUTmQ1-PQIbyt0n8ctfXq7JCMIfbrU,2383664 +yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 +yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 +yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 +yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 +yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 +yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 +yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 +yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 +yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 +yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 +yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 +yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 +yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 +yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 +yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 +yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/REQUESTED b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/WHEEL b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..661c896c5ec129044b1f11b372397ba070a4fb58 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.44.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git 
a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/top_level.txt b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6475e911f628412049bc4090d86f23ac403adde --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_yaml +yaml diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/_virtualenv.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/_virtualenv.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..31dd572d874e93b01b32a30224b7e849773561be Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/_virtualenv.cpython-310.pyc differ diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9acb6f149f954051406664688ec748eff302de7e Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc differ diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f8f380a6136b71d1cf824c299c20606f33841fef Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc differ diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/_virtualenv.pth b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/_virtualenv.pth new file mode 100644 index 0000000000000000000000000000000000000000..74d2eb6578fd774c6d8e324cd2fad5efb8beb747 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/_virtualenv.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69ac3d8f27e679c81b94ab30b3b56e9cd138219b1ba94a1fa3606d5a76a1433d +size 18 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/INSTALLER b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/LICENSE b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..93e41fb6965a930641ba181a3ae4e1008246d9d5 --- /dev/null +++ 
b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2021-2025, ContourPy Developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/METADATA b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..4f04341e81058ea488cfb8ba68a62a4269889a47 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/METADATA @@ -0,0 +1,94 @@ +Metadata-Version: 2.1 +Name: contourpy +Version: 1.3.2 +Summary: Python library for calculating contours of 2D quadrilateral grids +Author-Email: Ian Thomas +License: BSD 3-Clause License + + Copyright (c) 2021-2025, ContourPy Developers. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: C++ +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Scientific/Engineering :: Information Analysis +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Visualization +Project-URL: Homepage, https://github.com/contourpy/contourpy +Project-URL: Changelog, https://contourpy.readthedocs.io/en/latest/changelog.html +Project-URL: Documentation, https://contourpy.readthedocs.io +Project-URL: Repository, https://github.com/contourpy/contourpy +Requires-Python: >=3.10 +Requires-Dist: numpy>=1.23 +Provides-Extra: docs +Requires-Dist: furo; extra == "docs" +Requires-Dist: sphinx>=7.2; extra == "docs" +Requires-Dist: sphinx-copybutton; extra == "docs" +Provides-Extra: bokeh +Requires-Dist: bokeh; extra == "bokeh" +Requires-Dist: selenium; extra == "bokeh" +Provides-Extra: mypy +Requires-Dist: contourpy[bokeh,docs]; extra == "mypy" +Requires-Dist: bokeh; extra == "mypy" +Requires-Dist: docutils-stubs; extra == "mypy" +Requires-Dist: mypy==1.15.0; extra == "mypy" +Requires-Dist: types-Pillow; extra == "mypy" +Provides-Extra: test +Requires-Dist: contourpy[test-no-images]; extra == "test" +Requires-Dist: matplotlib; extra == "test" +Requires-Dist: Pillow; extra == "test" +Provides-Extra: test-no-images +Requires-Dist: pytest; extra == "test-no-images" +Requires-Dist: pytest-cov; extra == "test-no-images" +Requires-Dist: pytest-rerunfailures; extra == "test-no-images" +Requires-Dist: pytest-xdist; extra == "test-no-images" +Requires-Dist: wurlitzer; extra == "test-no-images" +Description-Content-Type: text/markdown + +ContourPy + +ContourPy is a Python library for calculating contours of 2D quadrilateral grids. It is written in C++11 and wrapped using pybind11. + +It contains the 2005 and 2014 algorithms used in Matplotlib as well as a newer algorithm that includes more features and is available in both serial and multithreaded versions. It provides an easy way for Python libraries to use contouring algorithms without having to include Matplotlib as a dependency. 
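The paragraph above states the use case this vendored copy serves: contouring a gridded field without pulling in Matplotlib. A minimal sketch against contourpy 1.3.2, with grid and level values invented for illustration:

```python
import numpy as np
from contourpy import contour_generator

# A small (ny, nx) = (4, 5) grid of invented values.
x = np.linspace(0.0, 1.0, 5)
y = np.linspace(0.0, 1.0, 4)
z = np.outer(np.sin(np.pi * y), np.cos(np.pi * x))

gen = contour_generator(x, y, z, name="serial")
lines = gen.lines(0.25)  # contour lines at level 0.25; format depends on line_type
```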
+ + * **Documentation**: https://contourpy.readthedocs.io + * **Source code**: https://github.com/contourpy/contourpy + +| | | +| --- | --- | +| Latest release | [![PyPI version](https://img.shields.io/pypi/v/contourpy.svg?label=pypi&color=fdae61)](https://pypi.python.org/pypi/contourpy) [![conda-forge version](https://img.shields.io/conda/v/conda-forge/contourpy.svg?label=conda-forge&color=a6d96a)](https://anaconda.org/conda-forge/contourpy) | +| Downloads | [![PyPi downloads](https://img.shields.io/pypi/dm/contourpy?label=pypi&style=flat&color=fdae61)](https://pepy.tech/project/contourpy) | +| Python version | [![Platforms](https://img.shields.io/pypi/pyversions/contourpy?color=fdae61)](https://pypi.org/project/contourpy/) | +| Coverage | [![Codecov](https://img.shields.io/codecov/c/gh/contourpy/contourpy?color=fdae61&label=codecov)](https://app.codecov.io/gh/contourpy/contourpy) | diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/RECORD b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..b89155147dca2c2dd1d750dd4ca5cde5f7b521a1 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/RECORD @@ -0,0 +1,26 @@ +contourpy-1.3.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +contourpy-1.3.2.dist-info/LICENSE,sha256=NBcJefxk9PXm36Zu8n3sMU___FhSAAxg9INuwd-_FW4,1534 +contourpy-1.3.2.dist-info/METADATA,sha256=DYYPfLivjoUAXCCUmeDKJdqFFbS2eRXui4i4hSwxbAg,5461 +contourpy-1.3.2.dist-info/RECORD,, +contourpy-1.3.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +contourpy-1.3.2.dist-info/WHEEL,sha256=sZM_NeUMz2G4fDenMf11eikcCxcLaQWiYRmjwQBavQs,137 +contourpy/__init__.py,sha256=Vi2YbtUhM9VxYPY3PBvxfu0xZYr6fBysl5gQPJEo88k,11831 +contourpy/_contourpy.cpython-310-x86_64-linux-gnu.so,sha256=8it_MHwflBYc2jRua5KVBXY14gZO7MuH6GAuQVm8V-E,854312 +contourpy/_contourpy.pyi,sha256=fvtccxkiZwGb6qYag7Fp4E8bsFmAIjAmobf8LNxqfgc,7122 +contourpy/_version.py,sha256=HgKA3RqZvC7slo8MgLyffCGwJbQ3cY6I7oUMFvGLWps,22 +contourpy/array.py,sha256=4WwLuiZe30rizn_raymmY13OzE6hlCsDOO8kuVFOP18,8979 +contourpy/chunk.py,sha256=8njDQqlpuD22RjaaCyA75FXQsSQDY5hZGJSrxFpvGGU,3279 +contourpy/convert.py,sha256=mhyn7prEoWCnf0igaH-VqDwlk-CegFsZ4qOy2LL-hpU,26154 +contourpy/dechunk.py,sha256=EgFL6hw5H54ccuof4tJ2ehdnktT7trgZjiZqppsH8QI,7756 +contourpy/enum_util.py,sha256=o8MItJRs08oqzwPP3IwC75BBAY9Qq95saIzjkXBXwqA,1519 +contourpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +contourpy/typecheck.py,sha256=t1nvvCuKMYva1Zx4fc30EpdKFcO0Enz3n_UFfXBsq9o,10747 +contourpy/types.py,sha256=2K4T5tJpMIjYrkkg1Lqh3C2ZKlnOhnMtYmtwz92l_y8,247 +contourpy/util/__init__.py,sha256=eVhJ_crOHL7nkG4Kb0dOo7NL4WHMy_Px665aAN_3d-8,118 +contourpy/util/_build_config.py,sha256=uFmEfpQ4-HVkN0zZi7c2060J1i2uoaPsvkS4pf0XdiY,1848 +contourpy/util/bokeh_renderer.py,sha256=sOAZKyN11296muuLQ4uhQYCM4eaMwjnxUFrbbIxucmY,13959 +contourpy/util/bokeh_util.py,sha256=wc-S3ewBUYWyIkEv9jkhFySIergjLQl4Z0UEVnE0HhA,2804 +contourpy/util/data.py,sha256=-7SSGMLX_gN-1H2JzpNSEB_EcEF_uMtYdOo_ePRIcg8,2586 +contourpy/util/mpl_renderer.py,sha256=qPWMxDL_n-4dg9gOIHX-Va2UQz6n98eJdIRszpYr2AU,20125 +contourpy/util/mpl_util.py,sha256=0Jz5f-aA9XMWlpO2pDnHbkVgxIiw4SY_ysxf_gACWEo,3452 +contourpy/util/renderer.py,sha256=8CBHzPmVsFPfqsWxqrxGBhqFpJhVeFHFeDzVXAgT8Fc,5118 diff --git 
a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/REQUESTED b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/WHEEL b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..4e4c38ae320920b8f083b87f408214cdecd350d2 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy-1.3.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: meson +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..33c1f014b867cc069d532839b405c3f7696c5a22 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/__init__.py @@ -0,0 +1,285 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np + +from contourpy._contourpy import ( + ContourGenerator, + FillType, + LineType, + Mpl2005ContourGenerator, + Mpl2014ContourGenerator, + SerialContourGenerator, + ThreadedContourGenerator, + ZInterp, + max_threads, +) +from contourpy._version import __version__ +from contourpy.chunk import calc_chunk_sizes +from contourpy.convert import ( + convert_filled, + convert_lines, + convert_multi_filled, + convert_multi_lines, +) +from contourpy.dechunk import ( + dechunk_filled, + dechunk_lines, + dechunk_multi_filled, + dechunk_multi_lines, +) +from contourpy.enum_util import as_fill_type, as_line_type, as_z_interp + +if TYPE_CHECKING: + from typing import Any + + from numpy.typing import ArrayLike + + from ._contourpy import CoordinateArray, MaskArray + +__all__ = [ + "__version__", + "contour_generator", + "convert_filled", + "convert_lines", + "convert_multi_filled", + "convert_multi_lines", + "dechunk_filled", + "dechunk_lines", + "dechunk_multi_filled", + "dechunk_multi_lines", + "max_threads", + "FillType", + "LineType", + "ContourGenerator", + "Mpl2005ContourGenerator", + "Mpl2014ContourGenerator", + "SerialContourGenerator", + "ThreadedContourGenerator", + "ZInterp", +] + + +# Simple mapping of algorithm name to class name. +_class_lookup: dict[str, type[ContourGenerator]] = { + "mpl2005": Mpl2005ContourGenerator, + "mpl2014": Mpl2014ContourGenerator, + "serial": SerialContourGenerator, + "threaded": ThreadedContourGenerator, +} + + +def _remove_z_mask( + z: ArrayLike | np.ma.MaskedArray[Any, Any] | None, +) -> tuple[CoordinateArray, MaskArray | None]: + # Preserve mask if present. 
+ z_array = np.ma.asarray(z, dtype=np.float64) # type: ignore[no-untyped-call] + z_masked = np.ma.masked_invalid(z_array, copy=False) # type: ignore[no-untyped-call] + + if np.ma.is_masked(z_masked): # type: ignore[no-untyped-call] + mask = np.ma.getmask(z_masked) # type: ignore[no-untyped-call] + else: + mask = None + + return np.ma.getdata(z_masked), mask # type: ignore[no-untyped-call] + + +def contour_generator( + x: ArrayLike | None = None, + y: ArrayLike | None = None, + z: ArrayLike | np.ma.MaskedArray[Any, Any] | None = None, + *, + name: str = "serial", + corner_mask: bool | None = None, + line_type: LineType | str | None = None, + fill_type: FillType | str | None = None, + chunk_size: int | tuple[int, int] | None = None, + chunk_count: int | tuple[int, int] | None = None, + total_chunk_count: int | None = None, + quad_as_tri: bool = False, + z_interp: ZInterp | str | None = ZInterp.Linear, + thread_count: int = 0, +) -> ContourGenerator: + """Create and return a :class:`~.ContourGenerator` object. + + The class and properties of the returned :class:`~.ContourGenerator` are determined by the + function arguments, with sensible defaults. + + Args: + x (array-like of shape (ny, nx) or (nx,), optional): The x-coordinates of the ``z`` values. + May be 2D with the same shape as ``z.shape``, or 1D with length ``nx = z.shape[1]``. + If not specified are assumed to be ``np.arange(nx)``. Must be ordered monotonically. + y (array-like of shape (ny, nx) or (ny,), optional): The y-coordinates of the ``z`` values. + May be 2D with the same shape as ``z.shape``, or 1D with length ``ny = z.shape[0]``. + If not specified are assumed to be ``np.arange(ny)``. Must be ordered monotonically. + z (array-like of shape (ny, nx), may be a masked array): The 2D gridded values to calculate + the contours of. May be a masked array, and any invalid values (``np.inf`` or + ``np.nan``) will also be masked out. + name (str): Algorithm name, one of ``"serial"``, ``"threaded"``, ``"mpl2005"`` or + ``"mpl2014"``, default ``"serial"``. + corner_mask (bool, optional): Enable/disable corner masking, which only has an effect if + ``z`` is a masked array. If ``False``, any quad touching a masked point is masked out. + If ``True``, only the triangular corners of quads nearest these points are always masked + out, other triangular corners comprising three unmasked points are contoured as usual. + If not specified, uses the default provided by the algorithm ``name``. + line_type (LineType or str, optional): The format of contour line data returned from calls + to :meth:`~.ContourGenerator.lines`, specified either as a :class:`~.LineType` or its + string equivalent such as ``"SeparateCode"``. + If not specified, uses the default provided by the algorithm ``name``. + The relationship between the :class:`~.LineType` enum and the data format returned from + :meth:`~.ContourGenerator.lines` is explained at :ref:`line_type`. + fill_type (FillType or str, optional): The format of filled contour data returned from calls + to :meth:`~.ContourGenerator.filled`, specified either as a :class:`~.FillType` or its + string equivalent such as ``"OuterOffset"``. + If not specified, uses the default provided by the algorithm ``name``. + The relationship between the :class:`~.FillType` enum and the data format returned from + :meth:`~.ContourGenerator.filled` is explained at :ref:`fill_type`. 
+ chunk_size (int or tuple(int, int), optional): Chunk size in (y, x) directions, or the same + size in both directions if only one value is specified. + chunk_count (int or tuple(int, int), optional): Chunk count in (y, x) directions, or the + same count in both directions if only one value is specified. + total_chunk_count (int, optional): Total number of chunks. + quad_as_tri (bool): Enable/disable treating quads as 4 triangles, default ``False``. + If ``False``, a contour line within a quad is a straight line between points on two of + its edges. If ``True``, each full quad is divided into 4 triangles using a virtual point + at the centre (mean x, y of the corner points) and a contour line is piecewise linear + within those triangles. Corner-masked triangles are not affected by this setting, only + full unmasked quads. + z_interp (ZInterp or str, optional): How to interpolate ``z`` values when determining where + contour lines intersect the edges of quads and the ``z`` values of the central points of + quads, specified either as a :class:`~contourpy.ZInterp` or its string equivalent such + as ``"Log"``. Default is ``ZInterp.Linear``. + thread_count (int): Number of threads to use for contour calculation, default 0. Threads can + only be used with an algorithm ``name`` that supports threads (currently only + ``name="threaded"``) and there must be at least the same number of chunks as threads. + If ``thread_count=0`` and ``name="threaded"`` then it uses the maximum number of threads + as determined by the C++11 call ``std::thread::hardware_concurrency()``. If ``name`` is + something other than ``"threaded"`` then the ``thread_count`` will be set to ``1``. + + Return: + :class:`~.ContourGenerator`. + + Note: + A maximum of one of ``chunk_size``, ``chunk_count`` and ``total_chunk_count`` may be + specified. + + Warning: + The ``name="mpl2005"`` algorithm does not implement chunking for contour lines. + """ + x = np.asarray(x, dtype=np.float64) + y = np.asarray(y, dtype=np.float64) + z, mask = _remove_z_mask(z) + + # Check arguments: z. + if z.ndim != 2: + raise TypeError(f"Input z must be 2D, not {z.ndim}D") + + if z.shape[0] < 2 or z.shape[1] < 2: + raise TypeError(f"Input z must be at least a (2, 2) shaped array, but has shape {z.shape}") + + ny, nx = z.shape + + # Check arguments: x and y. + if x.ndim != y.ndim: + raise TypeError(f"Number of dimensions of x ({x.ndim}) and y ({y.ndim}) do not match") + + if x.ndim == 0: + x = np.arange(nx, dtype=np.float64) + y = np.arange(ny, dtype=np.float64) + x, y = np.meshgrid(x, y) + elif x.ndim == 1: + if len(x) != nx: + raise TypeError(f"Length of x ({len(x)}) must match number of columns in z ({nx})") + if len(y) != ny: + raise TypeError(f"Length of y ({len(y)}) must match number of rows in z ({ny})") + x, y = np.meshgrid(x, y) + elif x.ndim == 2: + if x.shape != z.shape: + raise TypeError(f"Shapes of x {x.shape} and z {z.shape} do not match") + if y.shape != z.shape: + raise TypeError(f"Shapes of y {y.shape} and z {z.shape} do not match") + else: + raise TypeError(f"Inputs x and y must be None, 1D or 2D, not {x.ndim}D") + + # Check mask shape just in case. + if mask is not None and mask.shape != z.shape: + raise ValueError("If mask is set it must be a 2D array with the same shape as z") + + # Check arguments: name. + if name not in _class_lookup: + raise ValueError(f"Unrecognised contour generator name: {name}") + + # Check arguments: chunk_size, chunk_count and total_chunk_count. 
+ y_chunk_size, x_chunk_size = calc_chunk_sizes( + chunk_size, chunk_count, total_chunk_count, ny, nx) + + cls = _class_lookup[name] + + # Check arguments: corner_mask. + if corner_mask is None: + # Set it to default, which is True if the algorithm supports it. + corner_mask = cls.supports_corner_mask() + elif corner_mask and not cls.supports_corner_mask(): + raise ValueError(f"{name} contour generator does not support corner_mask=True") + + # Check arguments: line_type. + if line_type is None: + line_type = cls.default_line_type + else: + line_type = as_line_type(line_type) + + if not cls.supports_line_type(line_type): + raise ValueError(f"{name} contour generator does not support line_type {line_type}") + + # Check arguments: fill_type. + if fill_type is None: + fill_type = cls.default_fill_type + else: + fill_type = as_fill_type(fill_type) + + if not cls.supports_fill_type(fill_type): + raise ValueError(f"{name} contour generator does not support fill_type {fill_type}") + + # Check arguments: quad_as_tri. + if quad_as_tri and not cls.supports_quad_as_tri(): + raise ValueError(f"{name} contour generator does not support quad_as_tri=True") + + # Check arguments: z_interp. + if z_interp is None: + z_interp = ZInterp.Linear + else: + z_interp = as_z_interp(z_interp) + + if z_interp != ZInterp.Linear and not cls.supports_z_interp(): + raise ValueError(f"{name} contour generator does not support z_interp {z_interp}") + + # Check arguments: thread_count. + if thread_count not in (0, 1) and not cls.supports_threads(): + raise ValueError(f"{name} contour generator does not support thread_count {thread_count}") + + # Prepare args and kwargs for contour generator constructor. + args = [x, y, z, mask] + kwargs: dict[str, int | bool | LineType | FillType | ZInterp] = { + "x_chunk_size": x_chunk_size, + "y_chunk_size": y_chunk_size, + } + + if name not in ("mpl2005", "mpl2014"): + kwargs["line_type"] = line_type + kwargs["fill_type"] = fill_type + + if cls.supports_corner_mask(): + kwargs["corner_mask"] = corner_mask + + if cls.supports_quad_as_tri(): + kwargs["quad_as_tri"] = quad_as_tri + + if cls.supports_z_interp(): + kwargs["z_interp"] = z_interp + + if cls.supports_threads(): + kwargs["thread_count"] = thread_count + + # Create contour generator. 
+ return cls(*args, **kwargs) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/_contourpy.pyi b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/_contourpy.pyi new file mode 100644 index 0000000000000000000000000000000000000000..5f44eace52ab6df5f3d05443c89e17f363dd8174 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/_contourpy.pyi @@ -0,0 +1,199 @@ +from typing import ClassVar, NoReturn, TypeAlias + +import numpy as np +import numpy.typing as npt + +import contourpy._contourpy as cpy + +# Input numpy array types, the same as in common.h +CoordinateArray: TypeAlias = npt.NDArray[np.float64] +MaskArray: TypeAlias = npt.NDArray[np.bool_] +LevelArray: TypeAlias = npt.ArrayLike + +# Output numpy array types, the same as in common.h +PointArray: TypeAlias = npt.NDArray[np.float64] +CodeArray: TypeAlias = npt.NDArray[np.uint8] +OffsetArray: TypeAlias = npt.NDArray[np.uint32] + +# Types returned from filled() +FillReturn_OuterCode: TypeAlias = tuple[list[PointArray], list[CodeArray]] +FillReturn_OuterOffset: TypeAlias = tuple[list[PointArray], list[OffsetArray]] +FillReturn_ChunkCombinedCode: TypeAlias = tuple[list[PointArray | None], list[CodeArray | None]] +FillReturn_ChunkCombinedOffset: TypeAlias = tuple[list[PointArray | None], list[OffsetArray | None]] +FillReturn_ChunkCombinedCodeOffset: TypeAlias = tuple[list[PointArray | None], list[CodeArray | None], list[OffsetArray | None]] +FillReturn_ChunkCombinedOffsetOffset: TypeAlias = tuple[list[PointArray | None], list[OffsetArray | None], list[OffsetArray | None]] +FillReturn_Chunk: TypeAlias = FillReturn_ChunkCombinedCode | FillReturn_ChunkCombinedOffset | FillReturn_ChunkCombinedCodeOffset | FillReturn_ChunkCombinedOffsetOffset +FillReturn: TypeAlias = FillReturn_OuterCode | FillReturn_OuterOffset | FillReturn_Chunk + +# Types returned from lines() +LineReturn_Separate: TypeAlias = list[PointArray] +LineReturn_SeparateCode: TypeAlias = tuple[list[PointArray], list[CodeArray]] +LineReturn_ChunkCombinedCode: TypeAlias = tuple[list[PointArray | None], list[CodeArray | None]] +LineReturn_ChunkCombinedOffset: TypeAlias = tuple[list[PointArray | None], list[OffsetArray | None]] +LineReturn_ChunkCombinedNan: TypeAlias = tuple[list[PointArray | None]] +LineReturn_Chunk: TypeAlias = LineReturn_ChunkCombinedCode | LineReturn_ChunkCombinedOffset | LineReturn_ChunkCombinedNan +LineReturn: TypeAlias = LineReturn_Separate | LineReturn_SeparateCode | LineReturn_Chunk + + +NDEBUG: int +__version__: str + +class FillType: + ChunkCombinedCode: ClassVar[cpy.FillType] + ChunkCombinedCodeOffset: ClassVar[cpy.FillType] + ChunkCombinedOffset: ClassVar[cpy.FillType] + ChunkCombinedOffsetOffset: ClassVar[cpy.FillType] + OuterCode: ClassVar[cpy.FillType] + OuterOffset: ClassVar[cpy.FillType] + __members__: ClassVar[dict[str, cpy.FillType]] + def __eq__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __ne__(self, other: object) -> bool: ... + def __setstate__(self, state: int) -> NoReturn: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... 
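Tying the `contour_generator` docstring above together, here is a hedged sketch of a masked-input call using the serial algorithm with corner masking and an explicit fill type; the data and levels are invented:

```python
import numpy as np
from contourpy import FillType, contour_generator

rng = np.random.default_rng(0)
z = np.ma.masked_array(rng.random((10, 12)))
z[3:5, 4:6] = np.ma.masked  # masked quads; corner_mask controls their edges

gen = contour_generator(
    z=z,                          # x, y default to np.arange(nx) / np.arange(ny)
    name="serial",
    corner_mask=True,
    fill_type=FillType.OuterOffset,
)
points, offsets = gen.filled(0.25, 0.75)  # polygons filled between the two levels
```

Per the stubs that follow, `filled()` with `FillType.OuterOffset` returns a tuple of point arrays and offset arrays, one entry per outer boundary.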
+ +class LineType: + ChunkCombinedCode: ClassVar[cpy.LineType] + ChunkCombinedNan: ClassVar[cpy.LineType] + ChunkCombinedOffset: ClassVar[cpy.LineType] + Separate: ClassVar[cpy.LineType] + SeparateCode: ClassVar[cpy.LineType] + __members__: ClassVar[dict[str, cpy.LineType]] + def __eq__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __ne__(self, other: object) -> bool: ... + def __setstate__(self, state: int) -> NoReturn: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + +class ZInterp: + Linear: ClassVar[cpy.ZInterp] + Log: ClassVar[cpy.ZInterp] + __members__: ClassVar[dict[str, cpy.ZInterp]] + def __eq__(self, other: object) -> bool: ... + def __getstate__(self) -> int: ... + def __hash__(self) -> int: ... + def __index__(self) -> int: ... + def __init__(self, value: int) -> None: ... + def __int__(self) -> int: ... + def __ne__(self, other: object) -> bool: ... + def __setstate__(self, state: int) -> NoReturn: ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + +def max_threads() -> int: ... + +class ContourGenerator: + def create_contour(self, level: float) -> LineReturn: ... + def create_filled_contour(self, lower_level: float, upper_level: float) -> FillReturn: ... + def filled(self, lower_level: float, upper_level: float) -> FillReturn: ... + def lines(self, level: float) -> LineReturn: ... + def multi_filled(self, levels: LevelArray) -> list[FillReturn]: ... + def multi_lines(self, levels: LevelArray) -> list[LineReturn]: ... + @staticmethod + def supports_corner_mask() -> bool: ... + @staticmethod + def supports_fill_type(fill_type: FillType) -> bool: ... + @staticmethod + def supports_line_type(line_type: LineType) -> bool: ... + @staticmethod + def supports_quad_as_tri() -> bool: ... + @staticmethod + def supports_threads() -> bool: ... + @staticmethod + def supports_z_interp() -> bool: ... + @property + def chunk_count(self) -> tuple[int, int]: ... + @property + def chunk_size(self) -> tuple[int, int]: ... + @property + def corner_mask(self) -> bool: ... + @property + def fill_type(self) -> FillType: ... + @property + def line_type(self) -> LineType: ... + @property + def quad_as_tri(self) -> bool: ... + @property + def thread_count(self) -> int: ... + @property + def z_interp(self) -> ZInterp: ... + default_fill_type: cpy.FillType + default_line_type: cpy.LineType + +class Mpl2005ContourGenerator(ContourGenerator): + def __init__( + self, + x: CoordinateArray, + y: CoordinateArray, + z: CoordinateArray, + mask: MaskArray, + *, + x_chunk_size: int = 0, + y_chunk_size: int = 0, + ) -> None: ... + +class Mpl2014ContourGenerator(ContourGenerator): + def __init__( + self, + x: CoordinateArray, + y: CoordinateArray, + z: CoordinateArray, + mask: MaskArray, + *, + corner_mask: bool, + x_chunk_size: int = 0, + y_chunk_size: int = 0, + ) -> None: ... + +class SerialContourGenerator(ContourGenerator): + def __init__( + self, + x: CoordinateArray, + y: CoordinateArray, + z: CoordinateArray, + mask: MaskArray, + *, + corner_mask: bool, + line_type: LineType, + fill_type: FillType, + quad_as_tri: bool, + z_interp: ZInterp, + x_chunk_size: int = 0, + y_chunk_size: int = 0, + ) -> None: ... + def _write_cache(self) -> NoReturn: ... 
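The stubs above expose per-algorithm capabilities as static methods on the generator classes, so they can be probed without constructing a generator. A short sketch; the printed values depend on the algorithm class, so none are asserted here:

```python
from contourpy import LineType, Mpl2005ContourGenerator, ThreadedContourGenerator

# Capability queries are static; no grid data is needed.
print(ThreadedContourGenerator.supports_threads())
print(Mpl2005ContourGenerator.supports_corner_mask())
print(Mpl2005ContourGenerator.supports_line_type(LineType.Separate))
```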
+ +class ThreadedContourGenerator(ContourGenerator): + def __init__( + self, + x: CoordinateArray, + y: CoordinateArray, + z: CoordinateArray, + mask: MaskArray, + *, + corner_mask: bool, + line_type: LineType, + fill_type: FillType, + quad_as_tri: bool, + z_interp: ZInterp, + x_chunk_size: int = 0, + y_chunk_size: int = 0, + thread_count: int = 0, + ) -> None: ... + def _write_cache(self) -> None: ... diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/_version.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..f708a9b201c27dea1e1329bdb0da07f50d3fea38 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/_version.py @@ -0,0 +1 @@ +__version__ = "1.3.2" diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/array.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/array.py new file mode 100644 index 0000000000000000000000000000000000000000..a7054fae1dc2f39b2dc6789e60818b1dd9b49c55 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/array.py @@ -0,0 +1,261 @@ +from __future__ import annotations + +from itertools import chain, pairwise +from typing import TYPE_CHECKING + +import numpy as np + +from contourpy.typecheck import check_code_array, check_offset_array, check_point_array +from contourpy.types import CLOSEPOLY, LINETO, MOVETO, code_dtype, offset_dtype, point_dtype + +if TYPE_CHECKING: + import contourpy._contourpy as cpy + + +def codes_from_offsets(offsets: cpy.OffsetArray) -> cpy.CodeArray: + """Determine codes from offsets, assuming they all correspond to closed polygons. + """ + check_offset_array(offsets) + + n = offsets[-1] + codes = np.full(n, LINETO, dtype=code_dtype) + codes[offsets[:-1]] = MOVETO + codes[offsets[1:] - 1] = CLOSEPOLY + return codes + + +def codes_from_offsets_and_points( + offsets: cpy.OffsetArray, + points: cpy.PointArray, +) -> cpy.CodeArray: + """Determine codes from offsets and points, using the equality of the start and end points of + each line to determine if lines are closed or not. + """ + check_offset_array(offsets) + check_point_array(points) + + codes = np.full(len(points), LINETO, dtype=code_dtype) + codes[offsets[:-1]] = MOVETO + + end_offsets = offsets[1:] - 1 + closed = np.all(points[offsets[:-1]] == points[end_offsets], axis=1) + codes[end_offsets[closed]] = CLOSEPOLY + + return codes + + +def codes_from_points(points: cpy.PointArray) -> cpy.CodeArray: + """Determine codes for a single line, using the equality of the start and end points to + determine if the line is closed or not. + """ + check_point_array(points) + + n = len(points) + codes = np.full(n, LINETO, dtype=code_dtype) + codes[0] = MOVETO + if np.all(points[0] == points[-1]): + codes[-1] = CLOSEPOLY + return codes + + +def concat_codes(list_of_codes: list[cpy.CodeArray]) -> cpy.CodeArray: + """Concatenate a list of codes arrays into a single code array. + """ + if not list_of_codes: + raise ValueError("Empty list passed to concat_codes") + + return np.concatenate(list_of_codes, dtype=code_dtype) + + +def concat_codes_or_none(list_of_codes_or_none: list[cpy.CodeArray | None]) -> cpy.CodeArray | None: + """Concatenate a list of codes arrays or None into a single code array or None. 
+ """ + list_of_codes = [codes for codes in list_of_codes_or_none if codes is not None] + if list_of_codes: + return concat_codes(list_of_codes) + else: + return None + + +def concat_offsets(list_of_offsets: list[cpy.OffsetArray]) -> cpy.OffsetArray: + """Concatenate a list of offsets arrays into a single offset array. + """ + if not list_of_offsets: + raise ValueError("Empty list passed to concat_offsets") + + n = len(list_of_offsets) + cumulative = np.cumsum([offsets[-1] for offsets in list_of_offsets], dtype=offset_dtype) + ret: cpy.OffsetArray = np.concatenate( + (list_of_offsets[0], *(list_of_offsets[i+1][1:] + cumulative[i] for i in range(n-1))), + dtype=offset_dtype, + ) + return ret + + +def concat_offsets_or_none( + list_of_offsets_or_none: list[cpy.OffsetArray | None], +) -> cpy.OffsetArray | None: + """Concatenate a list of offsets arrays or None into a single offset array or None. + """ + list_of_offsets = [offsets for offsets in list_of_offsets_or_none if offsets is not None] + if list_of_offsets: + return concat_offsets(list_of_offsets) + else: + return None + + +def concat_points(list_of_points: list[cpy.PointArray]) -> cpy.PointArray: + """Concatenate a list of point arrays into a single point array. + """ + if not list_of_points: + raise ValueError("Empty list passed to concat_points") + + return np.concatenate(list_of_points, dtype=point_dtype) + + +def concat_points_or_none( + list_of_points_or_none: list[cpy.PointArray | None], +) -> cpy.PointArray | None: + """Concatenate a list of point arrays or None into a single point array or None. + """ + list_of_points = [points for points in list_of_points_or_none if points is not None] + if list_of_points: + return concat_points(list_of_points) + else: + return None + + +def concat_points_or_none_with_nan( + list_of_points_or_none: list[cpy.PointArray | None], +) -> cpy.PointArray | None: + """Concatenate a list of points or None into a single point array or None, with NaNs used to + separate each line. + """ + list_of_points = [points for points in list_of_points_or_none if points is not None] + if list_of_points: + return concat_points_with_nan(list_of_points) + else: + return None + + +def concat_points_with_nan(list_of_points: list[cpy.PointArray]) -> cpy.PointArray: + """Concatenate a list of points into a single point array with NaNs used to separate each line. + """ + if not list_of_points: + raise ValueError("Empty list passed to concat_points_with_nan") + + if len(list_of_points) == 1: + return list_of_points[0] + else: + nan_spacer = np.full((1, 2), np.nan, dtype=point_dtype) + list_of_points = [list_of_points[0], + *list(chain(*((nan_spacer, x) for x in list_of_points[1:])))] + return concat_points(list_of_points) + + +def insert_nan_at_offsets(points: cpy.PointArray, offsets: cpy.OffsetArray) -> cpy.PointArray: + """Insert NaNs into a point array at locations specified by an offset array. + """ + check_point_array(points) + check_offset_array(offsets) + + if len(offsets) <= 2: + return points + else: + nan_spacer = np.array([np.nan, np.nan], dtype=point_dtype) + # Convert offsets to int64 to avoid numpy error when mixing signed and unsigned ints. + return np.insert(points, offsets[1:-1].astype(np.int64), nan_spacer, axis=0) + + +def offsets_from_codes(codes: cpy.CodeArray) -> cpy.OffsetArray: + """Determine offsets from codes using locations of MOVETO codes. 
+ """ + check_code_array(codes) + + return np.append(np.nonzero(codes == MOVETO)[0], len(codes)).astype(offset_dtype) + + +def offsets_from_lengths(list_of_points: list[cpy.PointArray]) -> cpy.OffsetArray: + """Determine offsets from lengths of point arrays. + """ + if not list_of_points: + raise ValueError("Empty list passed to offsets_from_lengths") + + return np.cumsum([0] + [len(line) for line in list_of_points], dtype=offset_dtype) + + +def outer_offsets_from_list_of_codes(list_of_codes: list[cpy.CodeArray]) -> cpy.OffsetArray: + """Determine outer offsets from codes using locations of MOVETO codes. + """ + if not list_of_codes: + raise ValueError("Empty list passed to outer_offsets_from_list_of_codes") + + return np.cumsum([0] + [np.count_nonzero(codes == MOVETO) for codes in list_of_codes], + dtype=offset_dtype) + + +def outer_offsets_from_list_of_offsets(list_of_offsets: list[cpy.OffsetArray]) -> cpy.OffsetArray: + """Determine outer offsets from a list of offsets. + """ + if not list_of_offsets: + raise ValueError("Empty list passed to outer_offsets_from_list_of_offsets") + + return np.cumsum([0] + [len(offsets)-1 for offsets in list_of_offsets], dtype=offset_dtype) + + +def remove_nan(points: cpy.PointArray) -> tuple[cpy.PointArray, cpy.OffsetArray]: + """Remove NaN from a points array, also return the offsets corresponding to the NaN removed. + """ + check_point_array(points) + + nan_offsets = np.nonzero(np.isnan(points[:, 0]))[0] + if len(nan_offsets) == 0: + return points, np.array([0, len(points)], dtype=offset_dtype) + else: + points = np.delete(points, nan_offsets, axis=0) + nan_offsets -= np.arange(len(nan_offsets)) + offsets: cpy.OffsetArray = np.empty(len(nan_offsets)+2, dtype=offset_dtype) + offsets[0] = 0 + offsets[1:-1] = nan_offsets + offsets[-1] = len(points) + return points, offsets + + +def split_codes_by_offsets(codes: cpy.CodeArray, offsets: cpy.OffsetArray) -> list[cpy.CodeArray]: + """Split a code array at locations specified by an offset array into a list of code arrays. + """ + check_code_array(codes) + check_offset_array(offsets) + + if len(offsets) > 2: + return np.split(codes, offsets[1:-1]) + else: + return [codes] + + +def split_points_by_offsets( + points: cpy.PointArray, + offsets: cpy.OffsetArray, +) -> list[cpy.PointArray]: + """Split a point array at locations specified by an offset array into a list of point arrays. + """ + check_point_array(points) + check_offset_array(offsets) + + if len(offsets) > 2: + return np.split(points, offsets[1:-1]) + else: + return [points] + + +def split_points_at_nan(points: cpy.PointArray) -> list[cpy.PointArray]: + """Split a points array at NaNs into a list of point arrays. 
+ """ + check_point_array(points) + + nan_offsets = np.nonzero(np.isnan(points[:, 0]))[0] + if len(nan_offsets) == 0: + return [points] + else: + nan_offsets = np.concatenate(([-1], nan_offsets, [len(points)])) + return [points[s+1:e] for s, e in pairwise(nan_offsets)] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/chunk.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/chunk.py new file mode 100644 index 0000000000000000000000000000000000000000..94fded1b161d64adb23748b067630370d8fa2f3c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/chunk.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +import math + + +def calc_chunk_sizes( + chunk_size: int | tuple[int, int] | None, + chunk_count: int | tuple[int, int] | None, + total_chunk_count: int | None, + ny: int, + nx: int, +) -> tuple[int, int]: + """Calculate chunk sizes. + + Args: + chunk_size (int or tuple(int, int), optional): Chunk size in (y, x) directions, or the same + size in both directions if only one is specified. Cannot be negative. + chunk_count (int or tuple(int, int), optional): Chunk count in (y, x) directions, or the + same count in both directions if only one is specified. If less than 1, set to 1. + total_chunk_count (int, optional): Total number of chunks. If less than 1, set to 1. + ny (int): Number of grid points in y-direction. + nx (int): Number of grid points in x-direction. + + Return: + tuple(int, int): Chunk sizes (y_chunk_size, x_chunk_size). + + Note: + Zero or one of ``chunk_size``, ``chunk_count`` and ``total_chunk_count`` should be + specified. + """ + if sum([chunk_size is not None, chunk_count is not None, total_chunk_count is not None]) > 1: + raise ValueError("Only one of chunk_size, chunk_count and total_chunk_count should be set") + + if nx < 2 or ny < 2: + raise ValueError(f"(ny, nx) must be at least (2, 2), not ({ny}, {nx})") + + if total_chunk_count is not None: + max_chunk_count = (nx-1)*(ny-1) + total_chunk_count = min(max(total_chunk_count, 1), max_chunk_count) + if total_chunk_count == 1: + chunk_size = 0 + elif total_chunk_count == max_chunk_count: + chunk_size = (1, 1) + else: + factors = two_factors(total_chunk_count) + if ny > nx: + chunk_count = factors + else: + chunk_count = (factors[1], factors[0]) + + if chunk_count is not None: + if isinstance(chunk_count, tuple): + y_chunk_count, x_chunk_count = chunk_count + else: + y_chunk_count = x_chunk_count = chunk_count + x_chunk_count = min(max(x_chunk_count, 1), nx-1) + y_chunk_count = min(max(y_chunk_count, 1), ny-1) + chunk_size = (math.ceil((ny-1) / y_chunk_count), math.ceil((nx-1) / x_chunk_count)) + + if chunk_size is None: + y_chunk_size = x_chunk_size = 0 + elif isinstance(chunk_size, tuple): + y_chunk_size, x_chunk_size = chunk_size + else: + y_chunk_size = x_chunk_size = chunk_size + + if x_chunk_size < 0 or y_chunk_size < 0: + raise ValueError("chunk_size cannot be negative") + + return y_chunk_size, x_chunk_size + + +def two_factors(n: int) -> tuple[int, int]: + """Split an integer into two integer factors. + + The two factors will be as close as possible to the sqrt of n, and are returned in decreasing + order. Worst case returns (n, 1). + + Args: + n (int): The integer to factorize, must be positive. + + Return: + tuple(int, int): The two factors of n, in decreasing order. 
+ """ + if n < 0: + raise ValueError(f"two_factors expects positive integer not {n}") + + i = math.ceil(math.sqrt(n)) + while n % i != 0: + i -= 1 + j = n // i + if i > j: + return i, j + else: + return j, i diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/convert.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/convert.py new file mode 100644 index 0000000000000000000000000000000000000000..ebdb1af8bf2f9b30c2d177dce9adf876dcb32835 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/convert.py @@ -0,0 +1,621 @@ +from __future__ import annotations + +from itertools import pairwise +from typing import TYPE_CHECKING, cast + +import numpy as np + +from contourpy._contourpy import FillType, LineType +import contourpy.array as arr +from contourpy.enum_util import as_fill_type, as_line_type +from contourpy.typecheck import check_filled, check_lines +from contourpy.types import MOVETO, offset_dtype + +if TYPE_CHECKING: + import contourpy._contourpy as cpy + + +def _convert_filled_from_OuterCode( + filled: cpy.FillReturn_OuterCode, + fill_type_to: FillType, +) -> cpy.FillReturn: + if fill_type_to == FillType.OuterCode: + return filled + elif fill_type_to == FillType.OuterOffset: + return (filled[0], [arr.offsets_from_codes(codes) for codes in filled[1]]) + + if len(filled[0]) > 0: + points = arr.concat_points(filled[0]) + codes = arr.concat_codes(filled[1]) + else: + points = None + codes = None + + if fill_type_to == FillType.ChunkCombinedCode: + return ([points], [codes]) + elif fill_type_to == FillType.ChunkCombinedOffset: + return ([points], [None if codes is None else arr.offsets_from_codes(codes)]) + elif fill_type_to == FillType.ChunkCombinedCodeOffset: + outer_offsets = None if points is None else arr.offsets_from_lengths(filled[0]) + ret1: cpy.FillReturn_ChunkCombinedCodeOffset = ([points], [codes], [outer_offsets]) + return ret1 + elif fill_type_to == FillType.ChunkCombinedOffsetOffset: + if codes is None: + ret2: cpy.FillReturn_ChunkCombinedOffsetOffset = ([None], [None], [None]) + else: + offsets = arr.offsets_from_codes(codes) + outer_offsets = arr.outer_offsets_from_list_of_codes(filled[1]) + ret2 = ([points], [offsets], [outer_offsets]) + return ret2 + else: + raise ValueError(f"Invalid FillType {fill_type_to}") + + +def _convert_filled_from_OuterOffset( + filled: cpy.FillReturn_OuterOffset, + fill_type_to: FillType, +) -> cpy.FillReturn: + if fill_type_to == FillType.OuterCode: + separate_codes = [arr.codes_from_offsets(offsets) for offsets in filled[1]] + return (filled[0], separate_codes) + elif fill_type_to == FillType.OuterOffset: + return filled + + if len(filled[0]) > 0: + points = arr.concat_points(filled[0]) + offsets = arr.concat_offsets(filled[1]) + else: + points = None + offsets = None + + if fill_type_to == FillType.ChunkCombinedCode: + return ([points], [None if offsets is None else arr.codes_from_offsets(offsets)]) + elif fill_type_to == FillType.ChunkCombinedOffset: + return ([points], [offsets]) + elif fill_type_to == FillType.ChunkCombinedCodeOffset: + if offsets is None: + ret1: cpy.FillReturn_ChunkCombinedCodeOffset = ([None], [None], [None]) + else: + codes = arr.codes_from_offsets(offsets) + outer_offsets = arr.offsets_from_lengths(filled[0]) + ret1 = ([points], [codes], [outer_offsets]) + return ret1 + elif fill_type_to == FillType.ChunkCombinedOffsetOffset: + if points is None: + ret2: 
cpy.FillReturn_ChunkCombinedOffsetOffset = ([None], [None], [None]) + else: + outer_offsets = arr.outer_offsets_from_list_of_offsets(filled[1]) + ret2 = ([points], [offsets], [outer_offsets]) + return ret2 + else: + raise ValueError(f"Invalid FillType {fill_type_to}") + + +def _convert_filled_from_ChunkCombinedCode( + filled: cpy.FillReturn_ChunkCombinedCode, + fill_type_to: FillType, +) -> cpy.FillReturn: + if fill_type_to == FillType.ChunkCombinedCode: + return filled + elif fill_type_to == FillType.ChunkCombinedOffset: + codes = [None if codes is None else arr.offsets_from_codes(codes) for codes in filled[1]] + return (filled[0], codes) + else: + raise ValueError( + f"Conversion from {FillType.ChunkCombinedCode} to {fill_type_to} not supported") + + +def _convert_filled_from_ChunkCombinedOffset( + filled: cpy.FillReturn_ChunkCombinedOffset, + fill_type_to: FillType, +) -> cpy.FillReturn: + if fill_type_to == FillType.ChunkCombinedCode: + chunk_codes: list[cpy.CodeArray | None] = [] + for points, offsets in zip(*filled): + if points is None: + chunk_codes.append(None) + else: + if TYPE_CHECKING: + assert offsets is not None + chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points)) + return (filled[0], chunk_codes) + elif fill_type_to == FillType.ChunkCombinedOffset: + return filled + else: + raise ValueError( + f"Conversion from {FillType.ChunkCombinedOffset} to {fill_type_to} not supported") + + +def _convert_filled_from_ChunkCombinedCodeOffset( + filled: cpy.FillReturn_ChunkCombinedCodeOffset, + fill_type_to: FillType, +) -> cpy.FillReturn: + if fill_type_to == FillType.OuterCode: + separate_points = [] + separate_codes = [] + for points, codes, outer_offsets in zip(*filled): + if points is not None: + if TYPE_CHECKING: + assert codes is not None + assert outer_offsets is not None + separate_points += arr.split_points_by_offsets(points, outer_offsets) + separate_codes += arr.split_codes_by_offsets(codes, outer_offsets) + return (separate_points, separate_codes) + elif fill_type_to == FillType.OuterOffset: + separate_points = [] + separate_offsets = [] + for points, codes, outer_offsets in zip(*filled): + if points is not None: + if TYPE_CHECKING: + assert codes is not None + assert outer_offsets is not None + separate_points += arr.split_points_by_offsets(points, outer_offsets) + separate_codes = arr.split_codes_by_offsets(codes, outer_offsets) + separate_offsets += [arr.offsets_from_codes(codes) for codes in separate_codes] + return (separate_points, separate_offsets) + elif fill_type_to == FillType.ChunkCombinedCode: + ret1: cpy.FillReturn_ChunkCombinedCode = (filled[0], filled[1]) + return ret1 + elif fill_type_to == FillType.ChunkCombinedOffset: + all_offsets = [None if codes is None else arr.offsets_from_codes(codes) + for codes in filled[1]] + ret2: cpy.FillReturn_ChunkCombinedOffset = (filled[0], all_offsets) + return ret2 + elif fill_type_to == FillType.ChunkCombinedCodeOffset: + return filled + elif fill_type_to == FillType.ChunkCombinedOffsetOffset: + chunk_offsets: list[cpy.OffsetArray | None] = [] + chunk_outer_offsets: list[cpy.OffsetArray | None] = [] + for codes, outer_offsets in zip(*filled[1:]): + if codes is None: + chunk_offsets.append(None) + chunk_outer_offsets.append(None) + else: + if TYPE_CHECKING: + assert outer_offsets is not None + offsets = arr.offsets_from_codes(codes) + outer_offsets = np.array([np.nonzero(offsets == oo)[0][0] for oo in outer_offsets], + dtype=offset_dtype) + chunk_offsets.append(offsets) + 
chunk_outer_offsets.append(outer_offsets) + ret3: cpy.FillReturn_ChunkCombinedOffsetOffset = ( + filled[0], chunk_offsets, chunk_outer_offsets, + ) + return ret3 + else: + raise ValueError(f"Invalid FillType {fill_type_to}") + + +def _convert_filled_from_ChunkCombinedOffsetOffset( + filled: cpy.FillReturn_ChunkCombinedOffsetOffset, + fill_type_to: FillType, +) -> cpy.FillReturn: + if fill_type_to == FillType.OuterCode: + separate_points = [] + separate_codes = [] + for points, offsets, outer_offsets in zip(*filled): + if points is not None: + if TYPE_CHECKING: + assert offsets is not None + assert outer_offsets is not None + codes = arr.codes_from_offsets_and_points(offsets, points) + outer_offsets = offsets[outer_offsets] + separate_points += arr.split_points_by_offsets(points, outer_offsets) + separate_codes += arr.split_codes_by_offsets(codes, outer_offsets) + return (separate_points, separate_codes) + elif fill_type_to == FillType.OuterOffset: + separate_points = [] + separate_offsets = [] + for points, offsets, outer_offsets in zip(*filled): + if points is not None: + if TYPE_CHECKING: + assert offsets is not None + assert outer_offsets is not None + if len(outer_offsets) > 2: + separate_offsets += [offsets[s:e+1] - offsets[s] for s, e in + pairwise(outer_offsets)] + else: + separate_offsets.append(offsets) + separate_points += arr.split_points_by_offsets(points, offsets[outer_offsets]) + return (separate_points, separate_offsets) + elif fill_type_to == FillType.ChunkCombinedCode: + chunk_codes: list[cpy.CodeArray | None] = [] + for points, offsets, outer_offsets in zip(*filled): + if points is None: + chunk_codes.append(None) + else: + if TYPE_CHECKING: + assert offsets is not None + assert outer_offsets is not None + chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points)) + ret1: cpy.FillReturn_ChunkCombinedCode = (filled[0], chunk_codes) + return ret1 + elif fill_type_to == FillType.ChunkCombinedOffset: + return (filled[0], filled[1]) + elif fill_type_to == FillType.ChunkCombinedCodeOffset: + chunk_codes = [] + chunk_outer_offsets: list[cpy.OffsetArray | None] = [] + for points, offsets, outer_offsets in zip(*filled): + if points is None: + chunk_codes.append(None) + chunk_outer_offsets.append(None) + else: + if TYPE_CHECKING: + assert offsets is not None + assert outer_offsets is not None + chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points)) + chunk_outer_offsets.append(offsets[outer_offsets]) + ret2: cpy.FillReturn_ChunkCombinedCodeOffset = (filled[0], chunk_codes, chunk_outer_offsets) + return ret2 + elif fill_type_to == FillType.ChunkCombinedOffsetOffset: + return filled + else: + raise ValueError(f"Invalid FillType {fill_type_to}") + + +def convert_filled( + filled: cpy.FillReturn, + fill_type_from: FillType | str, + fill_type_to: FillType | str, +) -> cpy.FillReturn: + """Convert filled contours from one :class:`~.FillType` to another. + + Args: + filled (sequence of arrays): Filled contour polygons to convert, such as those returned by + :meth:`.ContourGenerator.filled`. + fill_type_from (FillType or str): :class:`~.FillType` to convert from as enum or + string equivalent. + fill_type_to (FillType or str): :class:`~.FillType` to convert to as enum or string + equivalent. + + Return: + Converted filled contour polygons. + + When converting non-chunked fill types (``FillType.OuterCode`` or ``FillType.OuterOffset``) to + chunked ones, all polygons are placed in the first chunk. 
When converting in the other + direction, all chunk information is discarded. Converting a fill type that is not aware of the + relationship between outer boundaries and contained holes (``FillType.ChunkCombinedCode`` or + ``FillType.ChunkCombinedOffset``) to one that is will raise a ``ValueError``. + + .. versionadded:: 1.2.0 + """ + fill_type_from = as_fill_type(fill_type_from) + fill_type_to = as_fill_type(fill_type_to) + + check_filled(filled, fill_type_from) + + if fill_type_from == FillType.OuterCode: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_OuterCode, filled) + return _convert_filled_from_OuterCode(filled, fill_type_to) + elif fill_type_from == FillType.OuterOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_OuterOffset, filled) + return _convert_filled_from_OuterOffset(filled, fill_type_to) + elif fill_type_from == FillType.ChunkCombinedCode: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedCode, filled) + return _convert_filled_from_ChunkCombinedCode(filled, fill_type_to) + elif fill_type_from == FillType.ChunkCombinedOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedOffset, filled) + return _convert_filled_from_ChunkCombinedOffset(filled, fill_type_to) + elif fill_type_from == FillType.ChunkCombinedCodeOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedCodeOffset, filled) + return _convert_filled_from_ChunkCombinedCodeOffset(filled, fill_type_to) + elif fill_type_from == FillType.ChunkCombinedOffsetOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedOffsetOffset, filled) + return _convert_filled_from_ChunkCombinedOffsetOffset(filled, fill_type_to) + else: + raise ValueError(f"Invalid FillType {fill_type_from}") + + +def _convert_lines_from_Separate( + lines: cpy.LineReturn_Separate, + line_type_to: LineType, +) -> cpy.LineReturn: + if line_type_to == LineType.Separate: + return lines + elif line_type_to == LineType.SeparateCode: + separate_codes = [arr.codes_from_points(line) for line in lines] + return (lines, separate_codes) + elif line_type_to == LineType.ChunkCombinedCode: + if not lines: + ret1: cpy.LineReturn_ChunkCombinedCode = ([None], [None]) + else: + points = arr.concat_points(lines) + offsets = arr.offsets_from_lengths(lines) + codes = arr.codes_from_offsets_and_points(offsets, points) + ret1 = ([points], [codes]) + return ret1 + elif line_type_to == LineType.ChunkCombinedOffset: + if not lines: + ret2: cpy.LineReturn_ChunkCombinedOffset = ([None], [None]) + else: + ret2 = ([arr.concat_points(lines)], [arr.offsets_from_lengths(lines)]) + return ret2 + elif line_type_to == LineType.ChunkCombinedNan: + if not lines: + ret3: cpy.LineReturn_ChunkCombinedNan = ([None],) + else: + ret3 = ([arr.concat_points_with_nan(lines)],) + return ret3 + else: + raise ValueError(f"Invalid LineType {line_type_to}") + + +def _convert_lines_from_SeparateCode( + lines: cpy.LineReturn_SeparateCode, + line_type_to: LineType, +) -> cpy.LineReturn: + if line_type_to == LineType.Separate: + # Drop codes. 
+ return lines[0] + elif line_type_to == LineType.SeparateCode: + return lines + elif line_type_to == LineType.ChunkCombinedCode: + if not lines[0]: + ret1: cpy.LineReturn_ChunkCombinedCode = ([None], [None]) + else: + ret1 = ([arr.concat_points(lines[0])], [arr.concat_codes(lines[1])]) + return ret1 + elif line_type_to == LineType.ChunkCombinedOffset: + if not lines[0]: + ret2: cpy.LineReturn_ChunkCombinedOffset = ([None], [None]) + else: + ret2 = ([arr.concat_points(lines[0])], [arr.offsets_from_lengths(lines[0])]) + return ret2 + elif line_type_to == LineType.ChunkCombinedNan: + if not lines[0]: + ret3: cpy.LineReturn_ChunkCombinedNan = ([None],) + else: + ret3 = ([arr.concat_points_with_nan(lines[0])],) + return ret3 + else: + raise ValueError(f"Invalid LineType {line_type_to}") + + +def _convert_lines_from_ChunkCombinedCode( + lines: cpy.LineReturn_ChunkCombinedCode, + line_type_to: LineType, +) -> cpy.LineReturn: + if line_type_to in (LineType.Separate, LineType.SeparateCode): + separate_lines = [] + for points, codes in zip(*lines): + if points is not None: + if TYPE_CHECKING: + assert codes is not None + split_at = np.nonzero(codes == MOVETO)[0] + if len(split_at) > 1: + separate_lines += np.split(points, split_at[1:]) + else: + separate_lines.append(points) + if line_type_to == LineType.Separate: + return separate_lines + else: + separate_codes = [arr.codes_from_points(line) for line in separate_lines] + return (separate_lines, separate_codes) + elif line_type_to == LineType.ChunkCombinedCode: + return lines + elif line_type_to == LineType.ChunkCombinedOffset: + chunk_offsets = [None if codes is None else arr.offsets_from_codes(codes) + for codes in lines[1]] + return (lines[0], chunk_offsets) + elif line_type_to == LineType.ChunkCombinedNan: + points_nan: list[cpy.PointArray | None] = [] + for points, codes in zip(*lines): + if points is None: + points_nan.append(None) + else: + if TYPE_CHECKING: + assert codes is not None + offsets = arr.offsets_from_codes(codes) + points_nan.append(arr.insert_nan_at_offsets(points, offsets)) + return (points_nan,) + else: + raise ValueError(f"Invalid LineType {line_type_to}") + + +def _convert_lines_from_ChunkCombinedOffset( + lines: cpy.LineReturn_ChunkCombinedOffset, + line_type_to: LineType, +) -> cpy.LineReturn: + if line_type_to in (LineType.Separate, LineType.SeparateCode): + separate_lines = [] + for points, offsets in zip(*lines): + if points is not None: + if TYPE_CHECKING: + assert offsets is not None + separate_lines += arr.split_points_by_offsets(points, offsets) + if line_type_to == LineType.Separate: + return separate_lines + else: + separate_codes = [arr.codes_from_points(line) for line in separate_lines] + return (separate_lines, separate_codes) + elif line_type_to == LineType.ChunkCombinedCode: + chunk_codes: list[cpy.CodeArray | None] = [] + for points, offsets in zip(*lines): + if points is None: + chunk_codes.append(None) + else: + if TYPE_CHECKING: + assert offsets is not None + chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points)) + return (lines[0], chunk_codes) + elif line_type_to == LineType.ChunkCombinedOffset: + return lines + elif line_type_to == LineType.ChunkCombinedNan: + points_nan: list[cpy.PointArray | None] = [] + for points, offsets in zip(*lines): + if points is None: + points_nan.append(None) + else: + if TYPE_CHECKING: + assert offsets is not None + points_nan.append(arr.insert_nan_at_offsets(points, offsets)) + return (points_nan,) + else: + raise ValueError(f"Invalid LineType 
{line_type_to}") + + +def _convert_lines_from_ChunkCombinedNan( + lines: cpy.LineReturn_ChunkCombinedNan, + line_type_to: LineType, +) -> cpy.LineReturn: + if line_type_to in (LineType.Separate, LineType.SeparateCode): + separate_lines = [] + for points in lines[0]: + if points is not None: + separate_lines += arr.split_points_at_nan(points) + if line_type_to == LineType.Separate: + return separate_lines + else: + separate_codes = [arr.codes_from_points(points) for points in separate_lines] + return (separate_lines, separate_codes) + elif line_type_to == LineType.ChunkCombinedCode: + chunk_points: list[cpy.PointArray | None] = [] + chunk_codes: list[cpy.CodeArray | None] = [] + for points in lines[0]: + if points is None: + chunk_points.append(None) + chunk_codes.append(None) + else: + points, offsets = arr.remove_nan(points) + chunk_points.append(points) + chunk_codes.append(arr.codes_from_offsets_and_points(offsets, points)) + return (chunk_points, chunk_codes) + elif line_type_to == LineType.ChunkCombinedOffset: + chunk_points = [] + chunk_offsets: list[cpy.OffsetArray | None] = [] + for points in lines[0]: + if points is None: + chunk_points.append(None) + chunk_offsets.append(None) + else: + points, offsets = arr.remove_nan(points) + chunk_points.append(points) + chunk_offsets.append(offsets) + return (chunk_points, chunk_offsets) + elif line_type_to == LineType.ChunkCombinedNan: + return lines + else: + raise ValueError(f"Invalid LineType {line_type_to}") + + +def convert_lines( + lines: cpy.LineReturn, + line_type_from: LineType | str, + line_type_to: LineType | str, +) -> cpy.LineReturn: + """Convert contour lines from one :class:`~.LineType` to another. + + Args: + lines (sequence of arrays): Contour lines to convert, such as those returned by + :meth:`.ContourGenerator.lines`. + line_type_from (LineType or str): :class:`~.LineType` to convert from as enum or + string equivalent. + line_type_to (LineType or str): :class:`~.LineType` to convert to as enum or string + equivalent. + + Return: + Converted contour lines. + + When converting non-chunked line types (``LineType.Separate`` or ``LineType.SeparateCode``) to + chunked ones (``LineType.ChunkCombinedCode``, ``LineType.ChunkCombinedOffset`` or + ``LineType.ChunkCombinedNan``), all lines are placed in the first chunk. When converting in the + other direction, all chunk information is discarded. + + .. 
versionadded:: 1.2.0
+    """
+    line_type_from = as_line_type(line_type_from)
+    line_type_to = as_line_type(line_type_to)
+
+    check_lines(lines, line_type_from)
+
+    if line_type_from == LineType.Separate:
+        if TYPE_CHECKING:
+            lines = cast(cpy.LineReturn_Separate, lines)
+        return _convert_lines_from_Separate(lines, line_type_to)
+    elif line_type_from == LineType.SeparateCode:
+        if TYPE_CHECKING:
+            lines = cast(cpy.LineReturn_SeparateCode, lines)
+        return _convert_lines_from_SeparateCode(lines, line_type_to)
+    elif line_type_from == LineType.ChunkCombinedCode:
+        if TYPE_CHECKING:
+            lines = cast(cpy.LineReturn_ChunkCombinedCode, lines)
+        return _convert_lines_from_ChunkCombinedCode(lines, line_type_to)
+    elif line_type_from == LineType.ChunkCombinedOffset:
+        if TYPE_CHECKING:
+            lines = cast(cpy.LineReturn_ChunkCombinedOffset, lines)
+        return _convert_lines_from_ChunkCombinedOffset(lines, line_type_to)
+    elif line_type_from == LineType.ChunkCombinedNan:
+        if TYPE_CHECKING:
+            lines = cast(cpy.LineReturn_ChunkCombinedNan, lines)
+        return _convert_lines_from_ChunkCombinedNan(lines, line_type_to)
+    else:
+        raise ValueError(f"Invalid LineType {line_type_from}")
+
+
+def convert_multi_filled(
+    multi_filled: list[cpy.FillReturn],
+    fill_type_from: FillType | str,
+    fill_type_to: FillType | str,
+) -> list[cpy.FillReturn]:
+    """Convert multiple sets of filled contours from one :class:`~.FillType` to another.
+
+    Args:
+        multi_filled (nested sequence of arrays): Filled contour polygons to convert, such as those
+            returned by :meth:`.ContourGenerator.multi_filled`.
+        fill_type_from (FillType or str): :class:`~.FillType` to convert from as enum or
+            string equivalent.
+        fill_type_to (FillType or str): :class:`~.FillType` to convert to as enum or string
+            equivalent.
+
+    Return:
+        Converted sets of filled contour polygons.
+
+    When converting non-chunked fill types (``FillType.OuterCode`` or ``FillType.OuterOffset``) to
+    chunked ones, all polygons are placed in the first chunk. When converting in the other
+    direction, all chunk information is discarded. Converting a fill type that is not aware of the
+    relationship between outer boundaries and contained holes (``FillType.ChunkCombinedCode`` or
+    ``FillType.ChunkCombinedOffset``) to one that is will raise a ``ValueError``.
+
+    .. versionadded:: 1.3.0
+    """
+    fill_type_from = as_fill_type(fill_type_from)
+    fill_type_to = as_fill_type(fill_type_to)
+
+    return [convert_filled(filled, fill_type_from, fill_type_to) for filled in multi_filled]
+
+
+def convert_multi_lines(
+    multi_lines: list[cpy.LineReturn],
+    line_type_from: LineType | str,
+    line_type_to: LineType | str,
+) -> list[cpy.LineReturn]:
+    """Convert multiple sets of contour lines from one :class:`~.LineType` to another.
+
+    Args:
+        multi_lines (nested sequence of arrays): Contour lines to convert, such as those returned by
+            :meth:`.ContourGenerator.multi_lines`.
+        line_type_from (LineType or str): :class:`~.LineType` to convert from as enum or
+            string equivalent.
+        line_type_to (LineType or str): :class:`~.LineType` to convert to as enum or string
+            equivalent.
+
+    Return:
+        Converted sets of contour lines.
+
+    When converting non-chunked line types (``LineType.Separate`` or ``LineType.SeparateCode``) to
+    chunked ones (``LineType.ChunkCombinedCode``, ``LineType.ChunkCombinedOffset`` or
+    ``LineType.ChunkCombinedNan``), all lines are placed in the first chunk. When converting in the
+    other direction, all chunk information is discarded.
+
+    ..
versionadded:: 1.3.0 + """ + line_type_from = as_line_type(line_type_from) + line_type_to = as_line_type(line_type_to) + + return [convert_lines(lines, line_type_from, line_type_to) for lines in multi_lines] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/dechunk.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/dechunk.py new file mode 100644 index 0000000000000000000000000000000000000000..f571b4b5ffbb4801cd687b065314b51a93a1997d --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/dechunk.py @@ -0,0 +1,207 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, cast + +from contourpy._contourpy import FillType, LineType +from contourpy.array import ( + concat_codes_or_none, + concat_offsets_or_none, + concat_points_or_none, + concat_points_or_none_with_nan, +) +from contourpy.enum_util import as_fill_type, as_line_type +from contourpy.typecheck import check_filled, check_lines + +if TYPE_CHECKING: + import contourpy._contourpy as cpy + + +def dechunk_filled(filled: cpy.FillReturn, fill_type: FillType | str) -> cpy.FillReturn: + """Return the specified filled contours with chunked data moved into the first chunk. + + Filled contours that are not chunked (``FillType.OuterCode`` and ``FillType.OuterOffset``) and + those that are but only contain a single chunk are returned unmodified. Individual polygons are + unchanged, they are not geometrically combined. + + Args: + filled (sequence of arrays): Filled contour data, such as returned by + :meth:`.ContourGenerator.filled`. + fill_type (FillType or str): Type of :meth:`~.ContourGenerator.filled` as enum or string + equivalent. + + Return: + Filled contours in a single chunk. + + .. versionadded:: 1.2.0 + """ + fill_type = as_fill_type(fill_type) + + if fill_type in (FillType.OuterCode, FillType.OuterOffset): + # No-op if fill_type is not chunked. + return filled + + check_filled(filled, fill_type) + if len(filled[0]) < 2: + # No-op if just one chunk. 
+ return filled + + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_Chunk, filled) + points = concat_points_or_none(filled[0]) + + if fill_type == FillType.ChunkCombinedCode: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedCode, filled) + if points is None: + ret1: cpy.FillReturn_ChunkCombinedCode = ([None], [None]) + else: + ret1 = ([points], [concat_codes_or_none(filled[1])]) + return ret1 + elif fill_type == FillType.ChunkCombinedOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedOffset, filled) + if points is None: + ret2: cpy.FillReturn_ChunkCombinedOffset = ([None], [None]) + else: + ret2 = ([points], [concat_offsets_or_none(filled[1])]) + return ret2 + elif fill_type == FillType.ChunkCombinedCodeOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedCodeOffset, filled) + if points is None: + ret3: cpy.FillReturn_ChunkCombinedCodeOffset = ([None], [None], [None]) + else: + outer_offsets = concat_offsets_or_none(filled[2]) + ret3 = ([points], [concat_codes_or_none(filled[1])], [outer_offsets]) + return ret3 + elif fill_type == FillType.ChunkCombinedOffsetOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedOffsetOffset, filled) + if points is None: + ret4: cpy.FillReturn_ChunkCombinedOffsetOffset = ([None], [None], [None]) + else: + outer_offsets = concat_offsets_or_none(filled[2]) + ret4 = ([points], [concat_offsets_or_none(filled[1])], [outer_offsets]) + return ret4 + else: + raise ValueError(f"Invalid FillType {fill_type}") + + +def dechunk_lines(lines: cpy.LineReturn, line_type: LineType | str) -> cpy.LineReturn: + """Return the specified contour lines with chunked data moved into the first chunk. + + Contour lines that are not chunked (``LineType.Separate`` and ``LineType.SeparateCode``) and + those that are but only contain a single chunk are returned unmodified. Individual lines are + unchanged, they are not geometrically combined. + + Args: + lines (sequence of arrays): Contour line data, such as returned by + :meth:`.ContourGenerator.lines`. + line_type (LineType or str): Type of :meth:`~.ContourGenerator.lines` as enum or string + equivalent. + + Return: + Contour lines in a single chunk. + + .. versionadded:: 1.2.0 + """ + line_type = as_line_type(line_type) + + if line_type in (LineType.Separate, LineType.SeparateCode): + # No-op if line_type is not chunked. + return lines + + check_lines(lines, line_type) + if len(lines[0]) < 2: + # No-op if just one chunk. 
+ return lines + + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_Chunk, lines) + + if line_type == LineType.ChunkCombinedCode: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_ChunkCombinedCode, lines) + points = concat_points_or_none(lines[0]) + if points is None: + ret1: cpy.LineReturn_ChunkCombinedCode = ([None], [None]) + else: + ret1 = ([points], [concat_codes_or_none(lines[1])]) + return ret1 + elif line_type == LineType.ChunkCombinedOffset: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_ChunkCombinedOffset, lines) + points = concat_points_or_none(lines[0]) + if points is None: + ret2: cpy.LineReturn_ChunkCombinedOffset = ([None], [None]) + else: + ret2 = ([points], [concat_offsets_or_none(lines[1])]) + return ret2 + elif line_type == LineType.ChunkCombinedNan: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_ChunkCombinedNan, lines) + points = concat_points_or_none_with_nan(lines[0]) + ret3: cpy.LineReturn_ChunkCombinedNan = ([points],) + return ret3 + else: + raise ValueError(f"Invalid LineType {line_type}") + + +def dechunk_multi_filled( + multi_filled: list[cpy.FillReturn], + fill_type: FillType | str, +) -> list[cpy.FillReturn]: + """Return multiple sets of filled contours with chunked data moved into the first chunks. + + Filled contours that are not chunked (``FillType.OuterCode`` and ``FillType.OuterOffset``) and + those that are but only contain a single chunk are returned unmodified. Individual polygons are + unchanged, they are not geometrically combined. + + Args: + multi_filled (nested sequence of arrays): Filled contour data, such as returned by + :meth:`.ContourGenerator.multi_filled`. + fill_type (FillType or str): Type of :meth:`~.ContourGenerator.filled` as enum or string + equivalent. + + Return: + Multiple sets of filled contours in a single chunk. + + .. versionadded:: 1.3.0 + """ + fill_type = as_fill_type(fill_type) + + if fill_type in (FillType.OuterCode, FillType.OuterOffset): + # No-op if fill_type is not chunked. + return multi_filled + + return [dechunk_filled(filled, fill_type) for filled in multi_filled] + + +def dechunk_multi_lines( + multi_lines: list[cpy.LineReturn], + line_type: LineType | str, +) -> list[cpy.LineReturn]: + """Return multiple sets of contour lines with all chunked data moved into the first chunks. + + Contour lines that are not chunked (``LineType.Separate`` and ``LineType.SeparateCode``) and + those that are but only contain a single chunk are returned unmodified. Individual lines are + unchanged, they are not geometrically combined. + + Args: + multi_lines (nested sequence of arrays): Contour line data, such as returned by + :meth:`.ContourGenerator.multi_lines`. + line_type (LineType or str): Type of :meth:`~.ContourGenerator.lines` as enum or string + equivalent. + + Return: + Multiple sets of contour lines in a single chunk. + + .. versionadded:: 1.3.0 + """ + line_type = as_line_type(line_type) + + if line_type in (LineType.Separate, LineType.SeparateCode): + # No-op if line_type is not chunked. 
+ return multi_lines + + return [dechunk_lines(lines, line_type) for lines in multi_lines] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/enum_util.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/enum_util.py new file mode 100644 index 0000000000000000000000000000000000000000..14229abebf7e342a2834ffec2a0ad427a727fa5c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/enum_util.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from contourpy._contourpy import FillType, LineType, ZInterp + + +def as_fill_type(fill_type: FillType | str) -> FillType: + """Coerce a FillType or string value to a FillType. + + Args: + fill_type (FillType or str): Value to convert. + + Return: + FillType: Converted value. + """ + if isinstance(fill_type, str): + try: + return FillType.__members__[fill_type] + except KeyError as e: + raise ValueError(f"'{fill_type}' is not a valid FillType") from e + else: + return fill_type + + +def as_line_type(line_type: LineType | str) -> LineType: + """Coerce a LineType or string value to a LineType. + + Args: + line_type (LineType or str): Value to convert. + + Return: + LineType: Converted value. + """ + if isinstance(line_type, str): + try: + return LineType.__members__[line_type] + except KeyError as e: + raise ValueError(f"'{line_type}' is not a valid LineType") from e + else: + return line_type + + +def as_z_interp(z_interp: ZInterp | str) -> ZInterp: + """Coerce a ZInterp or string value to a ZInterp. + + Args: + z_interp (ZInterp or str): Value to convert. + + Return: + ZInterp: Converted value. + """ + if isinstance(z_interp, str): + try: + return ZInterp.__members__[z_interp] + except KeyError as e: + raise ValueError(f"'{z_interp}' is not a valid ZInterp") from e + else: + return z_interp diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/py.typed b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/typecheck.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/typecheck.py new file mode 100644 index 0000000000000000000000000000000000000000..23fbd54856a96f6b6b60b8557c76936f615a9cdc --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/typecheck.py @@ -0,0 +1,203 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast + +import numpy as np + +from contourpy import FillType, LineType +from contourpy.enum_util import as_fill_type, as_line_type +from contourpy.types import MOVETO, code_dtype, offset_dtype, point_dtype + +if TYPE_CHECKING: + import contourpy._contourpy as cpy + + +# Minimalist array-checking functions that check dtype, ndims and shape only. +# They do not walk the arrays to check the contents for performance reasons. 
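+# Illustrative sketch of the expected array forms (example values assumed here,
+# not exhaustive): a valid code array is 1-D uint8 starting with MOVETO, e.g.
+# np.array([1, 2, 2, 79], dtype=np.uint8); a valid offset array is 1-D uint32
+# starting at 0, e.g. np.array([0, 4], dtype=np.uint32); a valid point array
+# has shape (n, 2) with n > 1 and dtype float64.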
+def check_code_array(codes: Any) -> None:
+    if not isinstance(codes, np.ndarray):
+        raise TypeError(f"Expected numpy array not {type(codes)}")
+    if codes.dtype != code_dtype:
+        raise ValueError(f"Expected numpy array of dtype {code_dtype} not {codes.dtype}")
+    if not (codes.ndim == 1 and len(codes) > 1):
+        raise ValueError(f"Expected numpy array of shape (?,) not {codes.shape}")
+    if codes[0] != MOVETO:
+        raise ValueError(f"First element of code array must be {MOVETO}, not {codes[0]}")
+
+
+def check_offset_array(offsets: Any) -> None:
+    if not isinstance(offsets, np.ndarray):
+        raise TypeError(f"Expected numpy array not {type(offsets)}")
+    if offsets.dtype != offset_dtype:
+        raise ValueError(f"Expected numpy array of dtype {offset_dtype} not {offsets.dtype}")
+    if not (offsets.ndim == 1 and len(offsets) > 1):
+        raise ValueError(f"Expected numpy array of shape (?,) not {offsets.shape}")
+    if offsets[0] != 0:
+        raise ValueError(f"First element of offset array must be 0, not {offsets[0]}")
+
+
+def check_point_array(points: Any) -> None:
+    if not isinstance(points, np.ndarray):
+        raise TypeError(f"Expected numpy array not {type(points)}")
+    if points.dtype != point_dtype:
+        raise ValueError(f"Expected numpy array of dtype {point_dtype} not {points.dtype}")
+    if not (points.ndim == 2 and points.shape[1] == 2 and points.shape[0] > 1):
+        raise ValueError(f"Expected numpy array of shape (?, 2) not {points.shape}")
+
+
+def _check_tuple_of_lists_with_same_length(
+    maybe_tuple: Any,
+    tuple_length: int,
+    allow_empty_lists: bool = True,
+) -> None:
+    if not isinstance(maybe_tuple, tuple):
+        raise TypeError(f"Expected tuple not {type(maybe_tuple)}")
+    if len(maybe_tuple) != tuple_length:
+        raise ValueError(f"Expected tuple of length {tuple_length} not {len(maybe_tuple)}")
+    for maybe_list in maybe_tuple:
+        if not isinstance(maybe_list, list):
+            msg = f"Expected tuple to contain {tuple_length} lists but found a {type(maybe_list)}"
+            raise TypeError(msg)
+    lengths = [len(item) for item in maybe_tuple]
+    if len(set(lengths)) != 1:
+        msg = f"Expected {tuple_length} lists with same length but lengths are {lengths}"
+        raise ValueError(msg)
+    if not allow_empty_lists and lengths[0] == 0:
+        raise ValueError(f"Expected {tuple_length} non-empty lists")
+
+
+def check_filled(filled: cpy.FillReturn, fill_type: FillType | str) -> None:
+    fill_type = as_fill_type(fill_type)
+
+    if fill_type == FillType.OuterCode:
+        if TYPE_CHECKING:
+            filled = cast(cpy.FillReturn_OuterCode, filled)
+        _check_tuple_of_lists_with_same_length(filled, 2)
+        for i, (points, codes) in enumerate(zip(*filled)):
+            check_point_array(points)
+            check_code_array(codes)
+            if len(points) != len(codes):
+                raise ValueError(f"Points and codes have different lengths in polygon {i}")
+    elif fill_type == FillType.OuterOffset:
+        if TYPE_CHECKING:
+            filled = cast(cpy.FillReturn_OuterOffset, filled)
+        _check_tuple_of_lists_with_same_length(filled, 2)
+        for i, (points, offsets) in enumerate(zip(*filled)):
+            check_point_array(points)
+            check_offset_array(offsets)
+            if offsets[-1] != len(points):
+                raise ValueError(f"Inconsistent points and offsets in polygon {i}")
+    elif fill_type == FillType.ChunkCombinedCode:
+        if TYPE_CHECKING:
+            filled = cast(cpy.FillReturn_ChunkCombinedCode, filled)
+        _check_tuple_of_lists_with_same_length(filled, 2, allow_empty_lists=False)
+        for chunk, (points_or_none, codes_or_none) in enumerate(zip(*filled)):
+            if points_or_none is not None and codes_or_none is not None:
+                check_point_array(points_or_none)
+
check_code_array(codes_or_none) + if len(points_or_none) != len(codes_or_none): + raise ValueError(f"Points and codes have different lengths in chunk {chunk}") + elif not (points_or_none is None and codes_or_none is None): + raise ValueError(f"Inconsistent Nones in chunk {chunk}") + elif fill_type == FillType.ChunkCombinedOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedOffset, filled) + _check_tuple_of_lists_with_same_length(filled, 2, allow_empty_lists=False) + for chunk, (points_or_none, offsets_or_none) in enumerate(zip(*filled)): + if points_or_none is not None and offsets_or_none is not None: + check_point_array(points_or_none) + check_offset_array(offsets_or_none) + if offsets_or_none[-1] != len(points_or_none): + raise ValueError(f"Inconsistent points and offsets in chunk {chunk}") + elif not (points_or_none is None and offsets_or_none is None): + raise ValueError(f"Inconsistent Nones in chunk {chunk}") + elif fill_type == FillType.ChunkCombinedCodeOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedCodeOffset, filled) + _check_tuple_of_lists_with_same_length(filled, 3, allow_empty_lists=False) + for i, (points_or_none, codes_or_none, outer_offsets_or_none) in enumerate(zip(*filled)): + if (points_or_none is not None and codes_or_none is not None and + outer_offsets_or_none is not None): + check_point_array(points_or_none) + check_code_array(codes_or_none) + check_offset_array(outer_offsets_or_none) + if len(codes_or_none) != len(points_or_none): + raise ValueError(f"Points and codes have different lengths in chunk {i}") + if outer_offsets_or_none[-1] != len(codes_or_none): + raise ValueError(f"Inconsistent codes and outer_offsets in chunk {i}") + elif not (points_or_none is None and codes_or_none is None and + outer_offsets_or_none is None): + raise ValueError(f"Inconsistent Nones in chunk {i}") + elif fill_type == FillType.ChunkCombinedOffsetOffset: + if TYPE_CHECKING: + filled = cast(cpy.FillReturn_ChunkCombinedOffsetOffset, filled) + _check_tuple_of_lists_with_same_length(filled, 3, allow_empty_lists=False) + for i, (points_or_none, offsets_or_none, outer_offsets_or_none) in enumerate(zip(*filled)): + if (points_or_none is not None and offsets_or_none is not None and + outer_offsets_or_none is not None): + check_point_array(points_or_none) + check_offset_array(offsets_or_none) + check_offset_array(outer_offsets_or_none) + if offsets_or_none[-1] != len(points_or_none): + raise ValueError(f"Inconsistent points and offsets in chunk {i}") + if outer_offsets_or_none[-1] != len(offsets_or_none) - 1: + raise ValueError(f"Inconsistent offsets and outer_offsets in chunk {i}") + elif not (points_or_none is None and offsets_or_none is None and + outer_offsets_or_none is None): + raise ValueError(f"Inconsistent Nones in chunk {i}") + else: + raise ValueError(f"Invalid FillType {fill_type}") + + +def check_lines(lines: cpy.LineReturn, line_type: LineType | str) -> None: + line_type = as_line_type(line_type) + + if line_type == LineType.Separate: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_Separate, lines) + if not isinstance(lines, list): + raise TypeError(f"Expected list not {type(lines)}") + for points in lines: + check_point_array(points) + elif line_type == LineType.SeparateCode: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_SeparateCode, lines) + _check_tuple_of_lists_with_same_length(lines, 2) + for i, (points, codes) in enumerate(zip(*lines)): + check_point_array(points) + check_code_array(codes) + if len(points) != len(codes): + 
raise ValueError(f"Points and codes have different lengths in line {i}") + elif line_type == LineType.ChunkCombinedCode: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_ChunkCombinedCode, lines) + _check_tuple_of_lists_with_same_length(lines, 2, allow_empty_lists=False) + for chunk, (points_or_none, codes_or_none) in enumerate(zip(*lines)): + if points_or_none is not None and codes_or_none is not None: + check_point_array(points_or_none) + check_code_array(codes_or_none) + if len(points_or_none) != len(codes_or_none): + raise ValueError(f"Points and codes have different lengths in chunk {chunk}") + elif not (points_or_none is None and codes_or_none is None): + raise ValueError(f"Inconsistent Nones in chunk {chunk}") + elif line_type == LineType.ChunkCombinedOffset: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_ChunkCombinedOffset, lines) + _check_tuple_of_lists_with_same_length(lines, 2, allow_empty_lists=False) + for chunk, (points_or_none, offsets_or_none) in enumerate(zip(*lines)): + if points_or_none is not None and offsets_or_none is not None: + check_point_array(points_or_none) + check_offset_array(offsets_or_none) + if offsets_or_none[-1] != len(points_or_none): + raise ValueError(f"Inconsistent points and offsets in chunk {chunk}") + elif not (points_or_none is None and offsets_or_none is None): + raise ValueError(f"Inconsistent Nones in chunk {chunk}") + elif line_type == LineType.ChunkCombinedNan: + if TYPE_CHECKING: + lines = cast(cpy.LineReturn_ChunkCombinedNan, lines) + _check_tuple_of_lists_with_same_length(lines, 1, allow_empty_lists=False) + for _chunk, points_or_none in enumerate(lines[0]): + if points_or_none is not None: + check_point_array(points_or_none) + else: + raise ValueError(f"Invalid LineType {line_type}") diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/types.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/types.py new file mode 100644 index 0000000000000000000000000000000000000000..e704b98eac0736d83586c66803db6b79cdfc5967 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/types.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +import numpy as np + +# dtypes of arrays returned by ContourPy. +point_dtype = np.float64 +code_dtype = np.uint8 +offset_dtype = np.uint32 + +# Kind codes used in Matplotlib Paths. 
+MOVETO = 1 +LINETO = 2 +CLOSEPOLY = 79 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fe33fcef1e18d2a4b92287e434cf6b1257e4274f --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/__init__.py @@ -0,0 +1,5 @@ +from __future__ import annotations + +from contourpy.util._build_config import build_config + +__all__ = ["build_config"] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/_build_config.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/_build_config.py new file mode 100644 index 0000000000000000000000000000000000000000..1655c23e4ec46f28b987f857fccc76ca687c6ac8 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/_build_config.py @@ -0,0 +1,60 @@ +# _build_config.py.in is converted into _build_config.py during the meson build process. + +from __future__ import annotations + + +def build_config() -> dict[str, str]: + """ + Return a dictionary containing build configuration settings. + + All dictionary keys and values are strings, for example ``False`` is + returned as ``"False"``. + + .. versionadded:: 1.1.0 + """ + return dict( + # Python settings + python_version="3.10", + python_install_dir=r"/usr/local/lib/python3.10/site-packages/", + python_path=r"/tmp/build-env-dfa9bu3d/bin/python", + + # Package versions + contourpy_version="1.3.2", + meson_version="1.7.2", + mesonpy_version="0.17.1", + pybind11_version="2.13.6", + + # Misc meson settings + meson_backend="ninja", + build_dir=r"/project/.mesonpy-0roiogjz/lib/contourpy/util", + source_dir=r"/project/lib/contourpy/util", + cross_build="False", + + # Build options + build_options=r"-Dbuildtype=release -Db_ndebug=if-release -Db_vscrt=md -Dvsenv=True --native-file=/project/.mesonpy-0roiogjz/meson-python-native-file.ini", + buildtype="release", + cpp_std="c++17", + debug="False", + optimization="3", + vsenv="True", + b_ndebug="if-release", + b_vscrt="from_buildtype", + + # C++ compiler + compiler_name="gcc", + compiler_version="10.2.1", + linker_id="ld.bfd", + compile_command="c++", + + # Host machine + host_cpu="x86_64", + host_cpu_family="x86_64", + host_cpu_endian="little", + host_cpu_system="linux", + + # Build machine, same as host machine if not a cross_build + build_cpu="x86_64", + build_cpu_family="x86_64", + build_cpu_endian="little", + build_cpu_system="linux", + ) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/bokeh_renderer.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/bokeh_renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..6079ac3c755ca141d14d3155874eb68d41d0e504 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/bokeh_renderer.py @@ -0,0 +1,339 @@ +from __future__ import annotations + +import io +from typing import TYPE_CHECKING, Any + +from bokeh.io import export_png, export_svg, show +from bokeh.io.export import get_screenshot_as_png +from bokeh.layouts import gridplot +from bokeh.models.annotations.labels import Label +from bokeh.palettes import Category10 
+from bokeh.plotting import figure +import numpy as np + +from contourpy.enum_util import as_fill_type, as_line_type +from contourpy.util.bokeh_util import filled_to_bokeh, lines_to_bokeh +from contourpy.util.renderer import Renderer + +if TYPE_CHECKING: + from bokeh.core.enums import OutputBackendType + from bokeh.models import GridPlot + from bokeh.palettes import Palette + from numpy.typing import ArrayLike + from selenium.webdriver.remote.webdriver import WebDriver + + from contourpy import FillType, LineType + from contourpy._contourpy import FillReturn, LineReturn + + +class BokehRenderer(Renderer): + """Utility renderer using Bokeh to render a grid of plots over the same (x, y) range. + + Args: + nrows (int, optional): Number of rows of plots, default ``1``. + ncols (int, optional): Number of columns of plots, default ``1``. + figsize (tuple(float, float), optional): Figure size in inches (assuming 100 dpi), default + ``(9, 9)``. + show_frame (bool, optional): Whether to show frame and axes ticks, default ``True``. + want_svg (bool, optional): Whether output is required in SVG format or not, default + ``False``. + + Warning: + :class:`~.BokehRenderer`, unlike :class:`~.MplRenderer`, needs to be told in advance if + output to SVG format will be required later, otherwise it will assume PNG output. + """ + _figures: list[figure] + _layout: GridPlot + _palette: Palette + _want_svg: bool + + def __init__( + self, + nrows: int = 1, + ncols: int = 1, + figsize: tuple[float, float] = (9, 9), + show_frame: bool = True, + want_svg: bool = False, + ) -> None: + self._want_svg = want_svg + self._palette = Category10[10] + + total_size = 100*np.asarray(figsize, dtype=int) # Assuming 100 dpi. + + nfigures = nrows*ncols + self._figures = [] + backend: OutputBackendType = "svg" if self._want_svg else "canvas" + for _ in range(nfigures): + fig = figure(output_backend=backend) + fig.xgrid.visible = False # type: ignore[attr-defined] + fig.ygrid.visible = False # type: ignore[attr-defined] + self._figures.append(fig) + if not show_frame: + fig.outline_line_color = None + fig.axis.visible = False # type: ignore[attr-defined] + + self._layout = gridplot( + self._figures, ncols=ncols, toolbar_location=None, # type: ignore[arg-type] + width=total_size[0] // ncols, height=total_size[1] // nrows) + + def _convert_color(self, color: str) -> str: + if isinstance(color, str) and color[0] == "C": + index = int(color[1:]) + color = self._palette[index] + return color + + def _get_figure(self, ax: figure | int) -> figure: + if isinstance(ax, int): + ax = self._figures[ax] + return ax + + def filled( + self, + filled: FillReturn, + fill_type: FillType | str, + ax: figure | int = 0, + color: str = "C0", + alpha: float = 0.7, + ) -> None: + """Plot filled contours on a single plot. + + Args: + filled (sequence of arrays): Filled contour data as returned by + :meth:`~.ContourGenerator.filled`. + fill_type (FillType or str): Type of :meth:`~.ContourGenerator.filled` data as returned + by :attr:`~.ContourGenerator.fill_type`, or a string equivalent. + ax (int or Bokeh Figure, optional): Which plot to use, default ``0``. + color (str, optional): Color to plot with. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``Category10`` palette. Default ``"C0"``. + alpha (float, optional): Opacity to plot with, default ``0.7``. 
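+
+        Example (a minimal illustrative sketch, assuming a tiny 2x2 grid and
+        the ``contour_generator`` factory defaults):
+
+        >>> from contourpy import contour_generator
+        >>> cont_gen = contour_generator(z=[[0.0, 0.1], [0.2, 0.3]])
+        >>> renderer = BokehRenderer(figsize=(4, 4))
+        >>> renderer.filled(cont_gen.filled(0.05, 0.25), cont_gen.fill_type)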
+ """ + fill_type = as_fill_type(fill_type) + fig = self._get_figure(ax) + color = self._convert_color(color) + xs, ys = filled_to_bokeh(filled, fill_type) + if len(xs) > 0: + fig.multi_polygons(xs=[xs], ys=[ys], color=color, fill_alpha=alpha, line_width=0) # type: ignore[arg-type] + + def grid( + self, + x: ArrayLike, + y: ArrayLike, + ax: figure | int = 0, + color: str = "black", + alpha: float = 0.1, + point_color: str | None = None, + quad_as_tri_alpha: float = 0, + ) -> None: + """Plot quad grid lines on a single plot. + + Args: + x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points. + y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points. + ax (int or Bokeh Figure, optional): Which plot to use, default ``0``. + color (str, optional): Color to plot grid lines, default ``"black"``. + alpha (float, optional): Opacity to plot lines with, default ``0.1``. + point_color (str, optional): Color to plot grid points or ``None`` if grid points + should not be plotted, default ``None``. + quad_as_tri_alpha (float, optional): Opacity to plot ``quad_as_tri`` grid, default + ``0``. + + Colors may be a string color or the letter ``"C"`` followed by an integer in the range + ``"C0"`` to ``"C9"`` to use a color from the ``Category10`` palette. + + Warning: + ``quad_as_tri_alpha > 0`` plots all quads as though they are unmasked. + """ + fig = self._get_figure(ax) + x, y = self._grid_as_2d(x, y) + xs = list(x) + list(x.T) + ys = list(y) + list(y.T) + kwargs = {"line_color": color, "alpha": alpha} + fig.multi_line(xs, ys, **kwargs) + if quad_as_tri_alpha > 0: + # Assumes no quad mask. + xmid = (0.25*(x[:-1, :-1] + x[1:, :-1] + x[:-1, 1:] + x[1:, 1:])).ravel() + ymid = (0.25*(y[:-1, :-1] + y[1:, :-1] + y[:-1, 1:] + y[1:, 1:])).ravel() + fig.multi_line( + list(np.stack((x[:-1, :-1].ravel(), xmid, x[1:, 1:].ravel()), axis=1)), + list(np.stack((y[:-1, :-1].ravel(), ymid, y[1:, 1:].ravel()), axis=1)), + **kwargs) + fig.multi_line( + list(np.stack((x[:-1, 1:].ravel(), xmid, x[1:, :-1].ravel()), axis=1)), + list(np.stack((y[:-1, 1:].ravel(), ymid, y[1:, :-1].ravel()), axis=1)), + **kwargs) + if point_color is not None: + fig.scatter( + x=x.ravel(), y=y.ravel(), fill_color=color, line_color=None, alpha=alpha, + marker="circle", size=8) + + def lines( + self, + lines: LineReturn, + line_type: LineType | str, + ax: figure | int = 0, + color: str = "C0", + alpha: float = 1.0, + linewidth: float = 1, + ) -> None: + """Plot contour lines on a single plot. + + Args: + lines (sequence of arrays): Contour line data as returned by + :meth:`~.ContourGenerator.lines`. + line_type (LineType or str): Type of :meth:`~.ContourGenerator.lines` data as returned + by :attr:`~.ContourGenerator.line_type`, or a string equivalent. + ax (int or Bokeh Figure, optional): Which plot to use, default ``0``. + color (str, optional): Color to plot lines. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``Category10`` palette. Default ``"C0"``. + alpha (float, optional): Opacity to plot lines with, default ``1.0``. + linewidth (float, optional): Width of lines, default ``1``. + + Note: + Assumes all lines are open line strips not closed line loops. 
+ """ + line_type = as_line_type(line_type) + fig = self._get_figure(ax) + color = self._convert_color(color) + xs, ys = lines_to_bokeh(lines, line_type) + if xs is not None: + assert ys is not None + fig.line(xs, ys, line_color=color, line_alpha=alpha, line_width=linewidth) + + def mask( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike | np.ma.MaskedArray[Any, Any], + ax: figure | int = 0, + color: str = "black", + ) -> None: + """Plot masked out grid points as circles on a single plot. + + Args: + x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points. + y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points. + z (masked array of shape (ny, nx): z-values. + ax (int or Bokeh Figure, optional): Which plot to use, default ``0``. + color (str, optional): Circle color, default ``"black"``. + """ + mask = np.ma.getmask(z) # type: ignore[no-untyped-call] + if mask is np.ma.nomask: + return + fig = self._get_figure(ax) + color = self._convert_color(color) + x, y = self._grid_as_2d(x, y) + fig.scatter(x[mask], y[mask], fill_color=color, marker="circle", size=10) + + def save( + self, + filename: str, + transparent: bool = False, + *, + webdriver: WebDriver | None = None, + ) -> None: + """Save plots to SVG or PNG file. + + Args: + filename (str): Filename to save to. + transparent (bool, optional): Whether background should be transparent, default + ``False``. + webdriver (WebDriver, optional): Selenium WebDriver instance to use to create the image. + + .. versionadded:: 1.1.1 + + Warning: + To output to SVG file, ``want_svg=True`` must have been passed to the constructor. + """ + if transparent: + for fig in self._figures: + fig.background_fill_color = None + fig.border_fill_color = None + + if self._want_svg: + export_svg(self._layout, filename=filename, webdriver=webdriver) + else: + export_png(self._layout, filename=filename, webdriver=webdriver) + + def save_to_buffer(self, *, webdriver: WebDriver | None = None) -> io.BytesIO: + """Save plots to an ``io.BytesIO`` buffer. + + Args: + webdriver (WebDriver, optional): Selenium WebDriver instance to use to create the image. + + .. versionadded:: 1.1.1 + + Return: + BytesIO: PNG image buffer. + """ + image = get_screenshot_as_png(self._layout, driver=webdriver) + buffer = io.BytesIO() + image.save(buffer, "png") + return buffer + + def show(self) -> None: + """Show plots in web browser, in usual Bokeh manner. + """ + show(self._layout) + + def title(self, title: str, ax: figure | int = 0, color: str | None = None) -> None: + """Set the title of a single plot. + + Args: + title (str): Title text. + ax (int or Bokeh Figure, optional): Which plot to set the title of, default ``0``. + color (str, optional): Color to set title. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``Category10`` palette. Default ``None`` which is ``black``. + """ + fig = self._get_figure(ax) + fig.title = title + fig.title.align = "center" # type: ignore[attr-defined] + if color is not None: + fig.title.text_color = self._convert_color(color) # type: ignore[attr-defined] + + def z_values( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike, + ax: figure | int = 0, + color: str = "green", + fmt: str = ".1f", + quad_as_tri: bool = False, + ) -> None: + """Show ``z`` values on a single plot. + + Args: + x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points. 
+ y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points. + z (array-like of shape (ny, nx)): z-values. + ax (int or Bokeh Figure, optional): Which plot to use, default ``0``. + color (str, optional): Color of added text. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``Category10`` palette. Default ``"green"``. + fmt (str, optional): Format to display z-values, default ``".1f"``. + quad_as_tri (bool, optional): Whether to show z-values at the ``quad_as_tri`` centres + of quads. + + Warning: + ``quad_as_tri=True`` shows z-values for all quads, even if masked. + """ + fig = self._get_figure(ax) + color = self._convert_color(color) + x, y = self._grid_as_2d(x, y) + z = np.asarray(z) + ny, nx = z.shape + kwargs = {"text_color": color, "text_align": "center", "text_baseline": "middle"} + for j in range(ny): + for i in range(nx): + label = Label(x=x[j, i], y=y[j, i], text=f"{z[j, i]:{fmt}}", **kwargs) # type: ignore[arg-type] + fig.add_layout(label) + if quad_as_tri: + for j in range(ny-1): + for i in range(nx-1): + xx = np.mean(x[j:j+2, i:i+2]) + yy = np.mean(y[j:j+2, i:i+2]) + zz = np.mean(z[j:j+2, i:i+2]) + fig.add_layout(Label(x=xx, y=yy, text=f"{zz:{fmt}}", **kwargs)) # type: ignore[arg-type] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/bokeh_util.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/bokeh_util.py new file mode 100644 index 0000000000000000000000000000000000000000..e75eb844536b26dfe90de08bb65328e5d2e5ba1c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/bokeh_util.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, cast + +from contourpy import FillType, LineType +from contourpy.array import offsets_from_codes +from contourpy.convert import convert_lines +from contourpy.dechunk import dechunk_lines + +if TYPE_CHECKING: + from contourpy._contourpy import ( + CoordinateArray, + FillReturn, + LineReturn, + LineReturn_ChunkCombinedNan, + ) + + +def filled_to_bokeh( + filled: FillReturn, + fill_type: FillType, +) -> tuple[list[list[CoordinateArray]], list[list[CoordinateArray]]]: + xs: list[list[CoordinateArray]] = [] + ys: list[list[CoordinateArray]] = [] + if fill_type in (FillType.OuterOffset, FillType.ChunkCombinedOffset, + FillType.OuterCode, FillType.ChunkCombinedCode): + have_codes = fill_type in (FillType.OuterCode, FillType.ChunkCombinedCode) + + for points, offsets in zip(*filled): + if points is None: + continue + if have_codes: + offsets = offsets_from_codes(offsets) + xs.append([]) # New outer with zero or more holes. + ys.append([]) + for i in range(len(offsets)-1): + xys = points[offsets[i]:offsets[i+1]] + xs[-1].append(xys[:, 0]) + ys[-1].append(xys[:, 1]) + elif fill_type in (FillType.ChunkCombinedCodeOffset, FillType.ChunkCombinedOffsetOffset): + for points, codes_or_offsets, outer_offsets in zip(*filled): + if points is None: + continue + for j in range(len(outer_offsets)-1): + if fill_type == FillType.ChunkCombinedCodeOffset: + codes = codes_or_offsets[outer_offsets[j]:outer_offsets[j+1]] + offsets = offsets_from_codes(codes) + outer_offsets[j] + else: + offsets = codes_or_offsets[outer_offsets[j]:outer_offsets[j+1]+1] + xs.append([]) # New outer with zero or more holes.
+ ys.append([]) + for k in range(len(offsets)-1): + xys = points[offsets[k]:offsets[k+1]] + xs[-1].append(xys[:, 0]) + ys[-1].append(xys[:, 1]) + else: + raise RuntimeError(f"Conversion of FillType {fill_type} to Bokeh is not implemented") + + return xs, ys + + +def lines_to_bokeh( + lines: LineReturn, + line_type: LineType, +) -> tuple[CoordinateArray | None, CoordinateArray | None]: + lines = convert_lines(lines, line_type, LineType.ChunkCombinedNan) + lines = dechunk_lines(lines, LineType.ChunkCombinedNan) + if TYPE_CHECKING: + lines = cast(LineReturn_ChunkCombinedNan, lines) + points = lines[0][0] + if points is None: + return None, None + else: + return points[:, 0], points[:, 1] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/data.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/data.py new file mode 100644 index 0000000000000000000000000000000000000000..5fa75486958c8277430f149daf872e859de8e000 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/data.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +import numpy as np + +if TYPE_CHECKING: + from contourpy._contourpy import CoordinateArray + + +def simple( + shape: tuple[int, int], want_mask: bool = False, +) -> tuple[CoordinateArray, CoordinateArray, CoordinateArray | np.ma.MaskedArray[Any, Any]]: + """Return simple test data consisting of the sum of several 2D gaussians. + + Args: + shape (tuple(int, int)): 2D shape of data to return. + want_mask (bool, optional): Whether test data should be masked or not, default ``False``. + + Return: + Tuple of 3 arrays: ``x``, ``y``, ``z`` test data, ``z`` will be masked if + ``want_mask=True``. + """ + ny, nx = shape + x = np.arange(nx, dtype=np.float64) + y = np.arange(ny, dtype=np.float64) + x, y = np.meshgrid(x, y) + + xscale = nx - 1.0 + yscale = ny - 1.0 + + # z is sum of 2D gaussians. + amp = np.asarray([1.0, -1.0, 0.8, -0.9, 0.7]) + mid = np.asarray([[0.4, 0.2], [0.3, 0.8], [0.9, 0.75], [0.7, 0.3], [0.05, 0.7]]) + width = np.asarray([0.4, 0.2, 0.2, 0.2, 0.1]) + + z = np.zeros_like(x) + for i in range(len(amp)): + z += amp[i]*np.exp(-((x/xscale - mid[i, 0])**2 + (y/yscale - mid[i, 1])**2) / width[i]**2) + + if want_mask: + mask = np.logical_or( + ((x/xscale - 1.0)**2 / 0.2 + (y/yscale - 0.0)**2 / 0.1) < 1.0, + ((x/xscale - 0.2)**2 / 0.02 + (y/yscale - 0.45)**2 / 0.08) < 1.0, + ) + z = np.ma.array(z, mask=mask) # type: ignore[no-untyped-call] + + return x, y, z + + +def random( + shape: tuple[int, int], seed: int = 2187, mask_fraction: float = 0.0, +) -> tuple[CoordinateArray, CoordinateArray, CoordinateArray | np.ma.MaskedArray[Any, Any]]: + """Return random test data in the range 0 to 1. + + Args: + shape (tuple(int, int)): 2D shape of data to return. + seed (int, optional): Seed for random number generator, default 2187. + mask_fraction (float, optional): Fraction of elements to mask, default 0. + + Return: + Tuple of 3 arrays: ``x``, ``y``, ``z`` test data, ``z`` will be masked if + ``mask_fraction`` is greater than zero.
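+
+        Example (an editorial sketch, not part of the upstream docstring; it pairs
+        this test data with the top-level :func:`contourpy.contour_generator` API and
+        the :class:`~.MplRenderer` defined in ``mpl_renderer.py``; the output filename
+        is hypothetical):
+
+        .. code:: python
+
+            from contourpy import contour_generator
+            from contourpy.util.data import random
+            from contourpy.util.mpl_renderer import MplRenderer
+
+            x, y, z = random((20, 30), mask_fraction=0.2)
+            cont_gen = contour_generator(x=x, y=y, z=z)
+            renderer = MplRenderer()
+            renderer.filled(cont_gen.filled(0.25, 0.75), cont_gen.fill_type)
+            renderer.lines(cont_gen.lines(0.5), cont_gen.line_type)
+            renderer.mask(x, y, z, color="red")     # circle the masked points
+            renderer.save("random_demo.png")        # hypothetical filename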
+ """ + ny, nx = shape + x = np.arange(nx, dtype=np.float64) + y = np.arange(ny, dtype=np.float64) + x, y = np.meshgrid(x, y) + + rng = np.random.default_rng(seed) + z = rng.uniform(size=shape) + + if mask_fraction > 0.0: + mask_fraction = min(mask_fraction, 0.99) + mask = rng.uniform(size=shape) < mask_fraction + z = np.ma.array(z, mask=mask) # type: ignore[no-untyped-call] + + return x, y, z diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/mpl_renderer.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/mpl_renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..d648cadebff614b9da9b8a926012e6eb54c6aee4 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/mpl_renderer.py @@ -0,0 +1,535 @@ +from __future__ import annotations + +import io +from itertools import pairwise +from typing import TYPE_CHECKING, Any, cast + +import matplotlib.collections as mcollections +import matplotlib.pyplot as plt +import numpy as np + +from contourpy import FillType, LineType +from contourpy.convert import convert_filled, convert_lines +from contourpy.enum_util import as_fill_type, as_line_type +from contourpy.util.mpl_util import filled_to_mpl_paths, lines_to_mpl_paths +from contourpy.util.renderer import Renderer + +if TYPE_CHECKING: + from collections.abc import Sequence + + from matplotlib.axes import Axes + from matplotlib.figure import Figure + from numpy.typing import ArrayLike + + import contourpy._contourpy as cpy + + +class MplRenderer(Renderer): + """Utility renderer using Matplotlib to render a grid of plots over the same (x, y) range. + + Args: + nrows (int, optional): Number of rows of plots, default ``1``. + ncols (int, optional): Number of columns of plots, default ``1``. + figsize (tuple(float, float), optional): Figure size in inches, default ``(9, 9)``. + show_frame (bool, optional): Whether to show frame and axes ticks, default ``True``. + backend (str, optional): Matplotlib backend to use or ``None`` for default backend. + Default ``None``. + gridspec_kw (dict, optional): Gridspec keyword arguments to pass to ``plt.subplots``, + default None. + """ + _axes: Sequence[Axes] + _fig: Figure + _want_tight: bool + + def __init__( + self, + nrows: int = 1, + ncols: int = 1, + figsize: tuple[float, float] = (9, 9), + show_frame: bool = True, + backend: str | None = None, + gridspec_kw: dict[str, Any] | None = None, + ) -> None: + if backend is not None: + import matplotlib as mpl + mpl.use(backend) + + kwargs: dict[str, Any] = {"figsize": figsize, "squeeze": False, + "sharex": True, "sharey": True} + if gridspec_kw is not None: + kwargs["gridspec_kw"] = gridspec_kw + else: + kwargs["subplot_kw"] = {"aspect": "equal"} + + self._fig, axes = plt.subplots(nrows, ncols, **kwargs) + self._axes = axes.flatten() + if not show_frame: + for ax in self._axes: + ax.axis("off") + + self._want_tight = True + + def __del__(self) -> None: + if hasattr(self, "_fig"): + plt.close(self._fig) + + def _autoscale(self) -> None: + # Using axes._need_autoscale attribute if need to autoscale before rendering after adding + # lines/filled. Only want to autoscale once per axes regardless of how many lines/filled + # added. 
+ for ax in self._axes: + if getattr(ax, "_need_autoscale", False): + ax.autoscale_view(tight=True) + ax._need_autoscale = False # type: ignore[attr-defined] + if self._want_tight and len(self._axes) > 1: + self._fig.tight_layout() + + def _get_ax(self, ax: Axes | int) -> Axes: + if isinstance(ax, int): + ax = self._axes[ax] + return ax + + def filled( + self, + filled: cpy.FillReturn, + fill_type: FillType | str, + ax: Axes | int = 0, + color: str = "C0", + alpha: float = 0.7, + ) -> None: + """Plot filled contours on a single Axes. + + Args: + filled (sequence of arrays): Filled contour data as returned by + :meth:`~.ContourGenerator.filled`. + fill_type (FillType or str): Type of :meth:`~.ContourGenerator.filled` data as returned + by :attr:`~.ContourGenerator.fill_type`, or string equivalent. + ax (int or Matplotlib Axes, optional): Which axes to plot on, default ``0``. + color (str, optional): Color to plot with. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``tab10`` colormap. Default ``"C0"``. + alpha (float, optional): Opacity to plot with, default ``0.7``. + """ + fill_type = as_fill_type(fill_type) + ax = self._get_ax(ax) + paths = filled_to_mpl_paths(filled, fill_type) + collection = mcollections.PathCollection( + paths, facecolors=color, edgecolors="none", lw=0, alpha=alpha) + ax.add_collection(collection) + ax._need_autoscale = True # type: ignore[attr-defined] + + def grid( + self, + x: ArrayLike, + y: ArrayLike, + ax: Axes | int = 0, + color: str = "black", + alpha: float = 0.1, + point_color: str | None = None, + quad_as_tri_alpha: float = 0, + ) -> None: + """Plot quad grid lines on a single Axes. + + Args: + x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points. + y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points. + ax (int or Matplotlib Axes, optional): Which Axes to plot on, default ``0``. + color (str, optional): Color to plot grid lines, default ``"black"``. + alpha (float, optional): Opacity to plot lines with, default ``0.1``. + point_color (str, optional): Color to plot grid points or ``None`` if grid points + should not be plotted, default ``None``. + quad_as_tri_alpha (float, optional): Opacity to plot ``quad_as_tri`` grid, default 0. + + Colors may be a string color or the letter ``"C"`` followed by an integer in the range + ``"C0"`` to ``"C9"`` to use a color from the ``tab10`` colormap. + + Warning: + ``quad_as_tri_alpha > 0`` plots all quads as though they are unmasked. + """ + ax = self._get_ax(ax) + x, y = self._grid_as_2d(x, y) + kwargs: dict[str, Any] = {"color": color, "alpha": alpha} + ax.plot(x, y, x.T, y.T, **kwargs) + if quad_as_tri_alpha > 0: + # Assumes no quad mask.
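+            # (Editorial note) Each quad centre is the mean of its four corners; the
+            # stacked (3, n) arrays below draw both diagonals of every quad as
+            # corner-centre-corner polylines (Matplotlib plots each column of a 2D
+            # array as a separate line), which visualizes the quad_as_tri split.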
+ xmid = 0.25*(x[:-1, :-1] + x[1:, :-1] + x[:-1, 1:] + x[1:, 1:]) + ymid = 0.25*(y[:-1, :-1] + y[1:, :-1] + y[:-1, 1:] + y[1:, 1:]) + kwargs["alpha"] = quad_as_tri_alpha + ax.plot( + np.stack((x[:-1, :-1], xmid, x[1:, 1:])).reshape((3, -1)), + np.stack((y[:-1, :-1], ymid, y[1:, 1:])).reshape((3, -1)), + np.stack((x[1:, :-1], xmid, x[:-1, 1:])).reshape((3, -1)), + np.stack((y[1:, :-1], ymid, y[:-1, 1:])).reshape((3, -1)), + **kwargs) + if point_color is not None: + ax.plot(x, y, color=point_color, alpha=alpha, marker="o", lw=0) + ax._need_autoscale = True # type: ignore[attr-defined] + + def lines( + self, + lines: cpy.LineReturn, + line_type: LineType | str, + ax: Axes | int = 0, + color: str = "C0", + alpha: float = 1.0, + linewidth: float = 1, + ) -> None: + """Plot contour lines on a single Axes. + + Args: + lines (sequence of arrays): Contour line data as returned by + :meth:`~.ContourGenerator.lines`. + line_type (LineType or str): Type of :meth:`~.ContourGenerator.lines` data as returned + by :attr:`~.ContourGenerator.line_type`, or string equivalent. + ax (int or Matplotlib Axes, optional): Which Axes to plot on, default ``0``. + color (str, optional): Color to plot lines. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``tab10`` colormap. Default ``"C0"``. + alpha (float, optional): Opacity to plot lines with, default ``1.0``. + linewidth (float, optional): Width of lines, default ``1``. + """ + line_type = as_line_type(line_type) + ax = self._get_ax(ax) + paths = lines_to_mpl_paths(lines, line_type) + collection = mcollections.PathCollection( + paths, facecolors="none", edgecolors=color, lw=linewidth, alpha=alpha) + ax.add_collection(collection) + ax._need_autoscale = True # type: ignore[attr-defined] + + def mask( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike | np.ma.MaskedArray[Any, Any], + ax: Axes | int = 0, + color: str = "black", + ) -> None: + """Plot masked out grid points as circles on a single Axes. + + Args: + x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points. + y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points. + z (masked array of shape (ny, nx)): z-values. + ax (int or Matplotlib Axes, optional): Which Axes to plot on, default ``0``. + color (str, optional): Circle color, default ``"black"``. + """ + mask = np.ma.getmask(z) # type: ignore[no-untyped-call] + if mask is np.ma.nomask: + return + ax = self._get_ax(ax) + x, y = self._grid_as_2d(x, y) + ax.plot(x[mask], y[mask], "o", c=color) + + def save(self, filename: str, transparent: bool = False) -> None: + """Save plots to SVG or PNG file. + + Args: + filename (str): Filename to save to. + transparent (bool, optional): Whether background should be transparent, default + ``False``. + """ + self._autoscale() + self._fig.savefig(filename, transparent=transparent) + + def save_to_buffer(self) -> io.BytesIO: + """Save plots to an ``io.BytesIO`` buffer. + + Return: + BytesIO: PNG image buffer. + """ + self._autoscale() + buf = io.BytesIO() + self._fig.savefig(buf, format="png") + buf.seek(0) + return buf + + def show(self) -> None: + """Show plots in an interactive window, in the usual Matplotlib manner. + """ + self._autoscale() + plt.show() + + def title(self, title: str, ax: Axes | int = 0, color: str | None = None) -> None: + """Set the title of a single Axes. + + Args: + title (str): Title text.
+ ax (int or Matplotlib Axes, optional): Which Axes to set the title of, default ``0``. + color (str, optional): Color to set title. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``tab10`` colormap. Default is ``None`` which uses Matplotlib's default title color + that depends on the stylesheet in use. + """ + if color: + self._get_ax(ax).set_title(title, color=color) + else: + self._get_ax(ax).set_title(title) + + def z_values( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike, + ax: Axes | int = 0, + color: str = "green", + fmt: str = ".1f", + quad_as_tri: bool = False, + ) -> None: + """Show ``z`` values on a single Axes. + + Args: + x (array-like of shape (ny, nx) or (nx,)): The x-coordinates of the grid points. + y (array-like of shape (ny, nx) or (ny,)): The y-coordinates of the grid points. + z (array-like of shape (ny, nx)): z-values. + ax (int or Matplotlib Axes, optional): Which Axes to plot on, default ``0``. + color (str, optional): Color of added text. May be a string color or the letter ``"C"`` + followed by an integer in the range ``"C0"`` to ``"C9"`` to use a color from the + ``tab10`` colormap. Default ``"green"``. + fmt (str, optional): Format to display z-values, default ``".1f"``. + quad_as_tri (bool, optional): Whether to show z-values at the ``quad_as_tri`` centers + of quads. + + Warning: + ``quad_as_tri=True`` shows z-values for all quads, even if masked. + """ + ax = self._get_ax(ax) + x, y = self._grid_as_2d(x, y) + z = np.asarray(z) + ny, nx = z.shape + for j in range(ny): + for i in range(nx): + ax.text(x[j, i], y[j, i], f"{z[j, i]:{fmt}}", ha="center", va="center", + color=color, clip_on=True) + if quad_as_tri: + for j in range(ny-1): + for i in range(nx-1): + xx = np.mean(x[j:j+2, i:i+2], dtype=np.float64) + yy = np.mean(y[j:j+2, i:i+2], dtype=np.float64) + zz = np.mean(z[j:j+2, i:i+2]) + ax.text(xx, yy, f"{zz:{fmt}}", ha="center", va="center", color=color, + clip_on=True) + + +class MplTestRenderer(MplRenderer): + """Test renderer implemented using Matplotlib. + + No whitespace around plots and no spines/ticks displayed. + Uses Agg backend, so can only save to file/buffer, cannot call ``show()``. + """ + def __init__( + self, + nrows: int = 1, + ncols: int = 1, + figsize: tuple[float, float] = (9, 9), + ) -> None: + gridspec = { + "left": 0.01, + "right": 0.99, + "top": 0.99, + "bottom": 0.01, + "wspace": 0.01, + "hspace": 0.01, + } + super().__init__( + nrows, ncols, figsize, show_frame=True, backend="Agg", gridspec_kw=gridspec, + ) + + for ax in self._axes: + ax.set_xmargin(0.0) + ax.set_ymargin(0.0) + ax.set_xticks([]) + ax.set_yticks([]) + + self._want_tight = False + + +class MplDebugRenderer(MplRenderer): + """Debug renderer implemented using Matplotlib. + + Extends ``MplRenderer`` to add extra information to help in debugging, such as markers, arrows, + text, etc. + """ + def __init__( + self, + nrows: int = 1, + ncols: int = 1, + figsize: tuple[float, float] = (9, 9), + show_frame: bool = True, + ) -> None: + super().__init__(nrows, ncols, figsize, show_frame) + + def _arrow( + self, + ax: Axes, + line_start: cpy.CoordinateArray, + line_end: cpy.CoordinateArray, + color: str, + alpha: float, + arrow_size: float, + ) -> None: + mid = 0.5*(line_start + line_end) + along = line_end - line_start + along /= np.sqrt(np.dot(along, along)) # Unit vector.
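+        # (Editorial note) 'right' below is 'along' rotated 90 degrees clockwise; the
+        # three stacked points trace a V-shaped arrowhead centred on the segment
+        # midpoint, scaled by arrow_size.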
+ right = np.asarray((along[1], -along[0])) + arrow = np.stack(( + mid - (along*0.5 - right)*arrow_size, + mid + along*0.5*arrow_size, + mid - (along*0.5 + right)*arrow_size, + )) + ax.plot(arrow[:, 0], arrow[:, 1], "-", c=color, alpha=alpha) + + def filled( + self, + filled: cpy.FillReturn, + fill_type: FillType | str, + ax: Axes | int = 0, + color: str = "C1", + alpha: float = 0.7, + line_color: str = "C0", + line_alpha: float = 0.7, + point_color: str = "C0", + start_point_color: str = "red", + arrow_size: float = 0.1, + ) -> None: + fill_type = as_fill_type(fill_type) + super().filled(filled, fill_type, ax, color, alpha) + + if line_color is None and point_color is None: + return + + ax = self._get_ax(ax) + filled = convert_filled(filled, fill_type, FillType.ChunkCombinedOffset) + + # Lines. + if line_color is not None: + for points, offsets in zip(*filled): + if points is None: + continue + for start, end in pairwise(offsets): + xys = points[start:end] + ax.plot(xys[:, 0], xys[:, 1], c=line_color, alpha=line_alpha) + + if arrow_size > 0.0: + n = len(xys) + for i in range(n-1): + self._arrow(ax, xys[i], xys[i+1], line_color, line_alpha, arrow_size) + + # Points. + if point_color is not None: + for points, offsets in zip(*filled): + if points is None: + continue + mask = np.ones(offsets[-1], dtype=bool) + mask[offsets[1:]-1] = False # Exclude end points. + if start_point_color is not None: + start_indices = offsets[:-1] + mask[start_indices] = False # Exclude start points. + ax.plot( + points[:, 0][mask], points[:, 1][mask], "o", c=point_color, alpha=line_alpha) + + if start_point_color is not None: + ax.plot(points[:, 0][start_indices], points[:, 1][start_indices], "o", + c=start_point_color, alpha=line_alpha) + + def lines( + self, + lines: cpy.LineReturn, + line_type: LineType | str, + ax: Axes | int = 0, + color: str = "C0", + alpha: float = 1.0, + linewidth: float = 1, + point_color: str = "C0", + start_point_color: str = "red", + arrow_size: float = 0.1, + ) -> None: + line_type = as_line_type(line_type) + super().lines(lines, line_type, ax, color, alpha, linewidth) + + if arrow_size == 0.0 and point_color is None: + return + + ax = self._get_ax(ax) + separate_lines = convert_lines(lines, line_type, LineType.Separate) + if TYPE_CHECKING: + separate_lines = cast(cpy.LineReturn_Separate, separate_lines) + + if arrow_size > 0.0: + for line in separate_lines: + for i in range(len(line)-1): + self._arrow(ax, line[i], line[i+1], color, alpha, arrow_size) + + if point_color is not None: + for line in separate_lines: + start_index = 0 + end_index = len(line) + if start_point_color is not None: + ax.plot(line[0, 0], line[0, 1], "o", c=start_point_color, alpha=alpha) + start_index = 1 + if line[0][0] == line[-1][0] and line[0][1] == line[-1][1]: + end_index -= 1 + ax.plot(line[start_index:end_index, 0], line[start_index:end_index, 1], "o", + c=color, alpha=alpha) + + def point_numbers( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike, + ax: Axes | int = 0, + color: str = "red", + ) -> None: + ax = self._get_ax(ax) + x, y = self._grid_as_2d(x, y) + z = np.asarray(z) + ny, nx = z.shape + for j in range(ny): + for i in range(nx): + quad = i + j*nx + ax.text(x[j, i], y[j, i], str(quad), ha="right", va="top", color=color, + clip_on=True) + + def quad_numbers( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike, + ax: Axes | int = 0, + color: str = "blue", + ) -> None: + ax = self._get_ax(ax) + x, y = self._grid_as_2d(x, y) + z = np.asarray(z) + ny, nx = z.shape + for j in range(1, ny): 
+ for i in range(1, nx): + quad = i + j*nx + xmid = x[j-1:j+1, i-1:i+1].mean() + ymid = y[j-1:j+1, i-1:i+1].mean() + ax.text(xmid, ymid, str(quad), ha="center", va="center", color=color, clip_on=True) + + def z_levels( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike, + lower_level: float, + upper_level: float | None = None, + ax: Axes | int = 0, + color: str = "green", + ) -> None: + ax = self._get_ax(ax) + x, y = self._grid_as_2d(x, y) + z = np.asarray(z) + ny, nx = z.shape + for j in range(ny): + for i in range(nx): + zz = z[j, i] + if upper_level is not None and zz > upper_level: + z_level = 2 + elif zz > lower_level: + z_level = 1 + else: + z_level = 0 + ax.text(x[j, i], y[j, i], str(z_level), ha="left", va="bottom", color=color, + clip_on=True) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/mpl_util.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/mpl_util.py new file mode 100644 index 0000000000000000000000000000000000000000..d8587798c499a9ab898ee6cb54738be7e41bf7cf --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/mpl_util.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +from itertools import pairwise +from typing import TYPE_CHECKING, cast + +import matplotlib.path as mpath +import numpy as np + +from contourpy import FillType, LineType +from contourpy.array import codes_from_offsets + +if TYPE_CHECKING: + from contourpy._contourpy import FillReturn, LineReturn, LineReturn_Separate + + +def filled_to_mpl_paths(filled: FillReturn, fill_type: FillType) -> list[mpath.Path]: + if fill_type in (FillType.OuterCode, FillType.ChunkCombinedCode): + paths = [mpath.Path(points, codes) for points, codes in zip(*filled) if points is not None] + elif fill_type in (FillType.OuterOffset, FillType.ChunkCombinedOffset): + paths = [mpath.Path(points, codes_from_offsets(offsets)) + for points, offsets in zip(*filled) if points is not None] + elif fill_type == FillType.ChunkCombinedCodeOffset: + paths = [] + for points, codes, outer_offsets in zip(*filled): + if points is None: + continue + points = np.split(points, outer_offsets[1:-1]) + codes = np.split(codes, outer_offsets[1:-1]) + paths += [mpath.Path(p, c) for p, c in zip(points, codes)] + elif fill_type == FillType.ChunkCombinedOffsetOffset: + paths = [] + for points, offsets, outer_offsets in zip(*filled): + if points is None: + continue + for i in range(len(outer_offsets)-1): + offs = offsets[outer_offsets[i]:outer_offsets[i+1]+1] + pts = points[offs[0]:offs[-1]] + paths += [mpath.Path(pts, codes_from_offsets(offs - offs[0]))] + else: + raise RuntimeError(f"Conversion of FillType {fill_type} to MPL Paths is not implemented") + return paths + + +def lines_to_mpl_paths(lines: LineReturn, line_type: LineType) -> list[mpath.Path]: + if line_type == LineType.Separate: + if TYPE_CHECKING: + lines = cast(LineReturn_Separate, lines) + paths = [] + for line in lines: + # Drawing as Paths so that they can be closed correctly. 
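+            # (Editorial note) A line strip counts as closed when its first and last
+            # vertices coincide; Path(..., closed=True) then emits a CLOSEPOLY code so
+            # the loop is rendered as a properly closed path.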
+ closed = line[0, 0] == line[-1, 0] and line[0, 1] == line[-1, 1] + paths.append(mpath.Path(line, closed=closed)) + elif line_type in (LineType.SeparateCode, LineType.ChunkCombinedCode): + paths = [mpath.Path(points, codes) for points, codes in zip(*lines) if points is not None] + elif line_type == LineType.ChunkCombinedOffset: + paths = [] + for points, offsets in zip(*lines): + if points is None: + continue + for i in range(len(offsets)-1): + line = points[offsets[i]:offsets[i+1]] + closed = line[0, 0] == line[-1, 0] and line[0, 1] == line[-1, 1] + paths.append(mpath.Path(line, closed=closed)) + elif line_type == LineType.ChunkCombinedNan: + paths = [] + for points in lines[0]: + if points is None: + continue + nan_offsets = np.nonzero(np.isnan(points[:, 0]))[0] + nan_offsets = np.concatenate([[-1], nan_offsets, [len(points)]]) + for s, e in pairwise(nan_offsets): + line = points[s+1:e] + closed = line[0, 0] == line[-1, 0] and line[0, 1] == line[-1, 1] + paths.append(mpath.Path(line, closed=closed)) + else: + raise RuntimeError(f"Conversion of LineType {line_type} to MPL Paths is not implemented") + return paths diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/renderer.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..716569f776c4df9197a36f40e9f0a5e176292114 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/contourpy/util/renderer.py @@ -0,0 +1,166 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +import numpy as np + +if TYPE_CHECKING: + import io + + from numpy.typing import ArrayLike + + from contourpy._contourpy import CoordinateArray, FillReturn, FillType, LineReturn, LineType + + +class Renderer(ABC): + """Abstract base class for renderers.""" + + def _grid_as_2d(self, x: ArrayLike, y: ArrayLike) -> tuple[CoordinateArray, CoordinateArray]: + x = np.asarray(x) + y = np.asarray(y) + if x.ndim == 1: + x, y = np.meshgrid(x, y) + return x, y + + @abstractmethod + def filled( + self, + filled: FillReturn, + fill_type: FillType | str, + ax: Any = 0, + color: str = "C0", + alpha: float = 0.7, + ) -> None: + pass + + @abstractmethod + def grid( + self, + x: ArrayLike, + y: ArrayLike, + ax: Any = 0, + color: str = "black", + alpha: float = 0.1, + point_color: str | None = None, + quad_as_tri_alpha: float = 0, + ) -> None: + pass + + @abstractmethod + def lines( + self, + lines: LineReturn, + line_type: LineType | str, + ax: Any = 0, + color: str = "C0", + alpha: float = 1.0, + linewidth: float = 1, + ) -> None: + pass + + @abstractmethod + def mask( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike | np.ma.MaskedArray[Any, Any], + ax: Any = 0, + color: str = "black", + ) -> None: + pass + + def multi_filled( + self, + multi_filled: list[FillReturn], + fill_type: FillType | str, + ax: Any = 0, + color: str | None = None, + **kwargs: Any, + ) -> None: + """Plot multiple sets of filled contours on a single axes. + + Args: + multi_filled (list of filled contour arrays): Multiple filled contour sets as returned + by :meth:`.ContourGenerator.multi_filled`. + fill_type (FillType or str): Type of filled data as returned by + :attr:`~.ContourGenerator.fill_type`, or string equivalent. + ax (int or Renderer-specific axes or figure object, optional): Which axes to plot on, + default ``0``. 
+ color (str or None, optional): If a string color then this same color is used for all + filled contours. If ``None``, the default, then the filled contour sets use colors + from the ``tab10`` colormap in order, wrapping around to the beginning if more than + 10 sets of filled contours are rendered. + kwargs: All other keyword arguments are passed on to + :meth:`.Renderer.filled` unchanged. + + .. versionadded:: 1.3.0 + """ + if color is not None: + kwargs["color"] = color + for i, filled in enumerate(multi_filled): + if color is None: + kwargs["color"] = f"C{i % 10}" + self.filled(filled, fill_type, ax, **kwargs) + + def multi_lines( + self, + multi_lines: list[LineReturn], + line_type: LineType | str, + ax: Any = 0, + color: str | None = None, + **kwargs: Any, + ) -> None: + """Plot multiple sets of contour lines on a single axes. + + Args: + multi_lines (list of contour line arrays): Multiple contour line sets as returned by + :meth:`.ContourGenerator.multi_lines`. + line_type (LineType or str): Type of line data as returned by + :attr:`~.ContourGenerator.line_type`, or string equivalent. + ax (int or Renderer-specific axes or figure object, optional): Which axes to plot on, + default ``0``. + color (str or None, optional): If a string color then this same color is used for all + lines. If ``None``, the default, then the line sets use colors from the ``tab10`` + colormap in order, wrapping around to the beginning if more than 10 sets of lines + are rendered. + kwargs: All other keyword arguments are passed on to + :meth:`Renderer.lines` unchanged. + + .. versionadded:: 1.3.0 + """ + if color is not None: + kwargs["color"] = color + for i, lines in enumerate(multi_lines): + if color is None: + kwargs["color"] = f"C{i % 10}" + self.lines(lines, line_type, ax, **kwargs) + + @abstractmethod + def save(self, filename: str, transparent: bool = False) -> None: + pass + + @abstractmethod + def save_to_buffer(self) -> io.BytesIO: + pass + + @abstractmethod + def show(self) -> None: + pass + + @abstractmethod + def title(self, title: str, ax: Any = 0, color: str | None = None) -> None: + pass + + @abstractmethod + def z_values( + self, + x: ArrayLike, + y: ArrayLike, + z: ArrayLike, + ax: Any = 0, + color: str = "green", + fmt: str = ".1f", + quad_as_tri: bool = False, + ) -> None: + pass diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/distutils-precedence.pth b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/distutils-precedence.pth new file mode 100644 index 0000000000000000000000000000000000000000..c659194195f07bd6f19b5522515551309af14a3d --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/distutils-precedence.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2638ce9e2500e572a5e0de7faed6661eb569d1b696fcba07b0dd223da5f5d224 +size 151 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..5f79fef6fce65a4beef574eb07e64a6a40e1008d --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid
sha256:87ffb69cda69e138e9379d211599ef21bd0f142fcf400e78bfb187ca510d0fb9 +size 1024424 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..507896cc102fba8ae63de6e2eb22f9ee0b150caf --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1957b028ab8af856ac774507f0dc4755ae2e59cf9243f513ad90678583ce560c +size 1414432 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..676c61039c62ab1dbe367179a5b044e113bcc934 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0bba19fdd711e02fbc29d772e313ef511939a56a5cee9f88d192e023c6d79624 +size 4714576 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..196f8ca1b517c65876989badf43dfaa7364d5bd5 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0635c9682db43ff193dfb6686186f67a2549c7331f4d58303f0cea8b4b072199 +size 916288 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..6698f390ab57ad087a0d0c52f3663c8b8bdc749a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2e6e546258484a18ea729ee84f5591143e6b376e79c711454e2de9d23adb4b0f +size 1120912 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..dc6aff2966ea6d54c29ad9204c15b1c0ec10e720 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:d5daf0eafee222c8254bdccc814a454952e20a29168e811e758b09c2a4732793 +size 1568136 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/INSTALLER b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/METADATA b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..19786640848c278dd2320bdd36766d38542be1d6 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/METADATA @@ -0,0 +1,2161 @@ +Metadata-Version: 2.4 +Name: fonttools +Version: 4.59.0 +Summary: Tools to manipulate font files +Home-page: http://github.com/fonttools/fonttools +Author: Just van Rossum +Author-email: just@letterror.com +Maintainer: Behdad Esfahbod +Maintainer-email: behdad@behdad.org +License: MIT +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: End Users/Desktop +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Text Processing :: Fonts +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.external +Provides-Extra: ufo +Provides-Extra: lxml +Requires-Dist: lxml>=4.0; extra == "lxml" +Provides-Extra: woff +Requires-Dist: brotli>=1.0.1; platform_python_implementation == "CPython" and extra == "woff" +Requires-Dist: brotlicffi>=0.8.0; platform_python_implementation != "CPython" and extra == "woff" +Requires-Dist: zopfli>=0.1.4; extra == "woff" +Provides-Extra: unicode +Requires-Dist: unicodedata2>=15.1.0; python_version <= "3.12" and extra == "unicode" +Provides-Extra: graphite +Requires-Dist: lz4>=1.7.4.2; extra == "graphite" +Provides-Extra: interpolatable +Requires-Dist: scipy; platform_python_implementation != "PyPy" and extra == "interpolatable" +Requires-Dist: munkres; platform_python_implementation == "PyPy" and extra == "interpolatable" +Requires-Dist: pycairo; extra == "interpolatable" +Provides-Extra: plot +Requires-Dist: matplotlib; extra == "plot" +Provides-Extra: symfont +Requires-Dist: sympy; extra == "symfont" +Provides-Extra: type1 +Requires-Dist: xattr; sys_platform == "darwin" and extra == "type1" +Provides-Extra: pathops +Requires-Dist: skia-pathops>=0.5.0; extra == "pathops" +Provides-Extra: repacker +Requires-Dist: 
uharfbuzz>=0.23.0; extra == "repacker" +Provides-Extra: all +Requires-Dist: lxml>=4.0; extra == "all" +Requires-Dist: brotli>=1.0.1; platform_python_implementation == "CPython" and extra == "all" +Requires-Dist: brotlicffi>=0.8.0; platform_python_implementation != "CPython" and extra == "all" +Requires-Dist: zopfli>=0.1.4; extra == "all" +Requires-Dist: unicodedata2>=15.1.0; python_version <= "3.12" and extra == "all" +Requires-Dist: lz4>=1.7.4.2; extra == "all" +Requires-Dist: scipy; platform_python_implementation != "PyPy" and extra == "all" +Requires-Dist: munkres; platform_python_implementation == "PyPy" and extra == "all" +Requires-Dist: pycairo; extra == "all" +Requires-Dist: matplotlib; extra == "all" +Requires-Dist: sympy; extra == "all" +Requires-Dist: xattr; sys_platform == "darwin" and extra == "all" +Requires-Dist: skia-pathops>=0.5.0; extra == "all" +Requires-Dist: uharfbuzz>=0.23.0; extra == "all" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: maintainer +Dynamic: maintainer-email +Dynamic: platform +Dynamic: provides-extra +Dynamic: requires-python +Dynamic: summary + +|CI Build Status| |Coverage Status| |PyPI| |Gitter Chat| + +What is this? +~~~~~~~~~~~~~ + +| fontTools is a library for manipulating fonts, written in Python. The + project includes the TTX tool, that can convert TrueType and OpenType + fonts to and from an XML text format, which is also called TTX. It + supports TrueType, OpenType, AFM and to an extent Type 1 and some + Mac-specific formats. The project has an `MIT open-source + license `__. +| Among other things this means you can use it free of charge. + +`User documentation `_ and +`developer documentation `_ +are available at `Read the Docs `_. + +Installation +~~~~~~~~~~~~ + +FontTools requires `Python `__ 3.9 +or later. We try to follow the same schedule of minimum Python version support as +NumPy (see `NEP 29 `__). + +The package is listed in the Python Package Index (PyPI), so you can +install it with `pip `__: + +.. code:: sh + + pip install fonttools + +If you would like to contribute to its development, you can clone the +repository from GitHub, install the package in 'editable' mode and +modify the source code in place. We recommend creating a virtual +environment, using `virtualenv `__ or +Python 3 `venv `__ module. + +.. code:: sh + + # download the source code to 'fonttools' folder + git clone https://github.com/fonttools/fonttools.git + cd fonttools + + # create new virtual environment called e.g. 'fonttools-venv', or anything you like + python -m virtualenv fonttools-venv + + # source the `activate` shell script to enter the environment (Unix-like); to exit, just type `deactivate` + . fonttools-venv/bin/activate + + # to activate the virtual environment in Windows `cmd.exe`, do + fonttools-venv\Scripts\activate.bat + + # install in 'editable' mode + pip install -e . + +Optional Requirements +--------------------- + +The ``fontTools`` package currently has no (required) external dependencies +besides the modules included in the Python Standard Library. +However, a few extra dependencies are required by some of its modules, which +are needed to unlock optional features. +The ``fonttools`` PyPI distribution also supports so-called "extras", i.e. a +set of keywords that describe a group of additional dependencies, which can be +used when installing via pip, or when specifying a requirement. 
+For example: + +.. code:: sh + + pip install fonttools[ufo,lxml,woff,unicode] + +This command will install fonttools, as well as the optional dependencies that +are required to unlock the extra features named "ufo", etc. + +- ``Lib/fontTools/misc/etree.py`` + + The module exports a ElementTree-like API for reading/writing XML files, and + allows to use as the backend either the built-in ``xml.etree`` module or + `lxml `__. The latter is preferred whenever present, + as it is generally faster and more secure. + + *Extra:* ``lxml`` + +- ``Lib/fontTools/ufoLib`` + + Package for reading and writing UFO source files; it requires: + + * `fs `__: (aka ``pyfilesystem2``) filesystem + abstraction layer. + + *Extra:* ``ufo`` + +- ``Lib/fontTools/ttLib/woff2.py`` + + Module to compress/decompress WOFF 2.0 web fonts; it requires: + + * `brotli `__: Python bindings of + the Brotli compression library. + + *Extra:* ``woff`` + +- ``Lib/fontTools/ttLib/sfnt.py`` + + To better compress WOFF 1.0 web fonts, the following module can be used + instead of the built-in ``zlib`` library: + + * `zopfli `__: Python bindings of + the Zopfli compression library. + + *Extra:* ``woff`` + +- ``Lib/fontTools/unicode.py`` + + To display the Unicode character names when dumping the ``cmap`` table + with ``ttx`` we use the ``unicodedata`` module in the Standard Library. + The version included in there varies between different Python versions. + To use the latest available data, you can install: + + * `unicodedata2 `__: + ``unicodedata`` backport for Python 3.x updated to the latest Unicode + version 15.0. + + *Extra:* ``unicode`` + +- ``Lib/fontTools/varLib/interpolatable.py`` + + Module for finding wrong contour/component order between different masters. + It requires one of the following packages in order to solve the so-called + "minimum weight perfect matching problem in bipartite graphs", or + the Assignment problem: + + * `scipy `__: the Scientific Library + for Python, which internally uses `NumPy `__ + arrays and hence is very fast; + * `munkres `__: a pure-Python + module that implements the Hungarian or Kuhn-Munkres algorithm. + + To plot the results to a PDF or HTML format, you also need to install: + + * `pycairo `__: Python bindings for the + Cairo graphics library. Note that wheels are currently only available for + Windows, for other platforms see pycairo's `installation instructions + `__. + + *Extra:* ``interpolatable`` + +- ``Lib/fontTools/varLib/plot.py`` + + Module for visualizing DesignSpaceDocument and resulting VariationModel. + + * `matplotlib `__: 2D plotting library. + + *Extra:* ``plot`` + +- ``Lib/fontTools/misc/symfont.py`` + + Advanced module for symbolic font statistics analysis; it requires: + + * `sympy `__: the Python library for + symbolic mathematics. + + *Extra:* ``symfont`` + +- ``Lib/fontTools/t1Lib.py`` + + To get the file creator and type of Macintosh PostScript Type 1 fonts + on Python 3 you need to install the following module, as the old ``MacOS`` + module is no longer included in Mac Python: + + * `xattr `__: Python wrapper for + extended filesystem attributes (macOS platform only). + + *Extra:* ``type1`` + +- ``Lib/fontTools/ttLib/removeOverlaps.py`` + + Simplify TrueType glyphs by merging overlapping contours and components. + + * `skia-pathops `__: Python + bindings for the Skia library's PathOps module, performing boolean + operations on paths (union, intersection, etc.). 
+ + *Extra:* ``pathops`` + +- ``Lib/fontTools/pens/cocoaPen.py`` and ``Lib/fontTools/pens/quartzPen.py`` + + Pens for drawing glyphs with Cocoa ``NSBezierPath`` or ``CGPath`` require: + + * `PyObjC `__: the bridge between + Python and the Objective-C runtime (macOS platform only). + +- ``Lib/fontTools/pens/qtPen.py`` + + Pen for drawing glyphs with Qt's ``QPainterPath``, requires: + + * `PyQt5 `__: Python bindings for + the Qt cross platform UI and application toolkit. + +- ``Lib/fontTools/pens/reportLabPen.py`` + + Pen for drawing glyphs as PNG images, requires: + + * `reportlab `__: Python toolkit + for generating PDFs and graphics. + +- ``Lib/fontTools/pens/freetypePen.py`` + + Pen for drawing glyphs with FreeType as raster images, requires: + + * `freetype-py `__: Python binding + for the FreeType library. + +- ``Lib/fontTools/ttLib/tables/otBase.py`` + + Use the Harfbuzz library to serialize GPOS/GSUB using the ``hb_repack`` method, requires: + + * `uharfbuzz `__: Streamlined Cython + bindings for the harfbuzz shaping engine. + + *Extra:* ``repacker`` + +How to make a new release +~~~~~~~~~~~~~~~~~~~~~~~~~ + +1) Update ``NEWS.rst`` with all the changes since the last release. Write a + changelog entry for each PR, with one or two short sentences summarizing it, + as well as links to the PR and relevant issues addressed by the PR. Do not + put a new title; the next command will do it for you. +2) Use semantic versioning to decide whether the new release will be a 'major', + 'minor' or 'patch' release. It's usually one of the latter two, depending on + whether new backward compatible APIs were added, or simply some bugs were fixed. +3) From inside a venv, first do ``pip install -r dev-requirements.txt``, then run + the ``python setup.py release`` command from the tip of the ``main`` branch. + By default this bumps the third or 'patch' digit only, unless you pass ``--major`` + or ``--minor`` to bump respectively the first or second digit. + This bumps the package version string, extracts the changes since the latest + version from ``NEWS.rst``, and uses that text to create an annotated git tag + (or a signed git tag if you pass the ``--sign`` option and your git and Github + account are configured for `signing commits `__ + using a GPG key). + It also commits an additional version bump which opens the main branch for + the subsequent developmental cycle. +4) Push both the tag and commit to the upstream repository, by running the command + ``git push --follow-tags``. Note: it may push other local tags as well, so be + careful. +5) Let the CI build the wheel and source distribution packages and verify both + get uploaded to the Python Package Index (PyPI). +6) [Optional] Go to fonttools `Github Releases `__ + page and create a new release, copy-pasting the content of the git tag + message. This way, the release notes are nicely formatted as markdown, and + users watching the repo will get an email notification. One day we shall + automate that too.
+
+
+Acknowledgments
+~~~~~~~~~~~~~~~
+
+In alphabetical order:
+
+aschmitz, Olivier Berten, Samyak Bhuta, Erik van Blokland, Petr van Blokland,
+Jelle Bosma, Sascha Brawer, Tom Byrer, Antonio Cavedoni, Frédéric Coiffier,
+Vincent Connare, David Corbett, Simon Cozens, Dave Crossland, Simon Daniels,
+Peter Dekkers, Behdad Esfahbod, Behnam Esfahbod, Hannes Famira, Sam Fishman,
+Matt Fontaine, Takaaki Fuji, Rob Hagemans, Yannis Haralambous, Greg Hitchcock,
+Jeremie Hornus, Khaled Hosny, John Hudson, Denis Moyogo Jacquerye, Jack Jansen,
+Tom Kacvinsky, Jens Kutilek, Antoine Leca, Werner Lemberg, Tal Leming,
+Liang Hai, Peter Lofting, Cosimo Lupo, Olli Meier, Masaya Nakamura,
+Dave Opstad, Laurence Penney, Roozbeh Pournader, Garret Rieger, Read Roberts,
+Colin Rofls, Guido van Rossum, Just van Rossum, Andreas Seidel, Georg Seifert,
+Chris Simpkins, Miguel Sousa, Adam Twardoch, Adrien Tétar, Vitaly Volkov,
+Paul Wise.
+
+Copyrights
+~~~~~~~~~~
+
+| Copyright (c) 1999-2004 Just van Rossum, LettError
+  (just@letterror.com)
+| See `LICENSE `__ for the full license.
+
+Copyright (c) 2000 BeOpen.com. All Rights Reserved.
+
+Copyright (c) 1995-2001 Corporation for National Research Initiatives.
+All Rights Reserved.
+
+Copyright (c) 1991-1995 Stichting Mathematisch Centrum, Amsterdam. All
+Rights Reserved.
+
+Have fun!
+
+.. |CI Build Status| image:: https://github.com/fonttools/fonttools/workflows/Test/badge.svg
+   :target: https://github.com/fonttools/fonttools/actions?query=workflow%3ATest
+.. |Coverage Status| image:: https://codecov.io/gh/fonttools/fonttools/branch/main/graph/badge.svg
+   :target: https://codecov.io/gh/fonttools/fonttools
+.. |PyPI| image:: https://img.shields.io/pypi/v/fonttools.svg
+   :target: https://pypi.org/project/FontTools
+.. |Gitter Chat| image:: https://badges.gitter.im/fonttools-dev/Lobby.svg
+   :alt: Join the chat at https://gitter.im/fonttools-dev/Lobby
+   :target: https://gitter.im/fonttools-dev/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
+
+Changelog
+~~~~~~~~~
+
+4.59.0 (released 2025-07-16)
+----------------------------
+
+- Removed the hard dependency on pyfilesystem2 (the ``fs`` package) from the
+  ``fonttools[ufo]`` extra. It is replaced by the
+  ``fontTools.misc.filesystem`` package, a stdlib-only, drop-in replacement
+  for the subset of the pyfilesystem2 API used by ``fontTools.ufoLib``. The
+  latter should continue to work with the upstream ``fs`` (we even test
+  with/without). Clients who wish to continue using ``fs`` can do so by
+  depending on it directly instead of via the ``fonttools[ufo]`` extra
+  (#3885, #3620).
+- [xmlWriter] Replace illegal XML characters (e.g. control or
+  non-characters) with "?" when dumping to ttx (#3868, #71).
+- [varLib.hvar] Fixed vertical metrics fields copy/pasta error (#3884).
+- Micro-optimizations in the ttLib and sstruct modules (#3878, #3879).
+- [unicodedata] Add Garay script to RTL_SCRIPTS (#3882).
+- [roundingPen] Remove unreliable kwarg usage. Argument names aren’t
+  consistent among point pens’ ``.addComponent()`` implementations, in
+  particular ``baseGlyphName`` vs ``glyphName`` (#3880).
+
+4.58.5 (released 2025-07-03)
+----------------------------
+
+- [feaLib] Don't try to combine ligature & multiple substitution rules
+  (#3874).
+- [feaLib/ast] Use weakref proxies to avoid cycles in the visitor (#3873).
+- [varLib.instancer] Fixed instancing of CFF2 fonts where VarData contains
+  more than 64k items (#3858).
+
+4.58.4 (released 2025-06-13)
+----------------------------
+
+- [feaLib] Allow for empty MarkFilter & MarkAttach sets (#3856).
+
+4.58.3 (released 2025-06-13)
+----------------------------
+
+- [feaLib] Fixed iterable check for Python 3.13.4 and newer (#3854, #3855).
+
+4.58.2 (released 2025-06-06)
+----------------------------
+
+- [ttLib.reorderGlyphs] Handle CFF2 when reordering glyphs (#3852)
+- [subset] Copy name IDs in use before scrapping or scrambling them for
+  webfonts (#3853)
+
+4.58.1 (released 2025-05-28)
+----------------------------
+
+- [varLib] Make sure that fvar named instances only reuse name ID 2 or 17 if
+  they are at the default location across all axes, to match the OT spec
+  requirement (#3831).
+- [feaLib] Improve single substitution promotion to multiple/ligature
+  substitutions, fixing a few bugs as well (#3849).
+- [loggingTools] Make ``Timer._time`` a static method that doesn't take
+  self, which makes it easier to override (#3836).
+- [featureVars] Use ``None`` for an empty ConditionSet, which translates to
+  a null offset in the compiled table (#3850).
+- [feaLib] Raise an error on conflicting ligature substitution rules instead
+  of silently taking the last one (#3835).
+- Add typing annotations to T2CharStringPen (#3837).
+- [feaLib] Add single substitutions that were promoted to multiple or
+  ligature substitutions to the ``aalt`` feature (#3847).
+- [featureVars] Create a default ``LangSys`` in a ``ScriptRecord`` if
+  missing when adding feature variations to an existing GSUB later in the
+  build (#3838).
+- [symfont] Added a ``main()``.
+- [cffLib.specializer] Fix rmoveto merging when blends are used (#3839,
+  #3840).
+- [pyftmerge] Add support for cmap format 14 in the merge tool (#3830).
+- [varLib.instancer/cff2] Fix vsindex of Private dicts when instantiating
+  (#3828, #3232).
+- Update text file read to use UTF-8 with an optional BOM so it works with
+  e.g. Windows Notepad.exe (#3824).
+- [varLib] Ensure that instances only reuse name ID 2 or 17 if they are at
+  the default location across all axes (#3831).
+- [varLib] Create a dflt LangSys in a ScriptRecord when adding variations
+  later, to fix an avoidable crash in an edge case (#3838).
+
+4.58.0 (released 2025-05-10)
+----------------------------
+
+- Drop Python 3.8, require 3.9+ (#3819)
+- [HVAR, VVAR] Prune unused regions when using a direct mapping (#3797)
+- [Docs] Improvements to ufoLib documentation (#3721)
+- [Docs] Improvements to varLib documentation (#3727)
+- [Docs] Improvements to Pens and pen-module documentation (#3724)
+- [Docs] Miscellany updates to docs (misc modules and smaller modules) (#3730)
+- [subset] Close codepoints over BiDi mirror variants (#3801)
+- [feaLib] Fix serializing ChainContextPosStatement and
+  ChainContextSubstStatement in some rare cases (#3788)
+- [designspaceLib] Clarify user expectations for getStatNames (#2892)
+- [GVAR] Add support for new `GVAR` table (#3728)
+- [TSI0, TSI5] Derive number of entries to decompile from data length (#2477)
+- [ttLib] Fix `AttributeError` when reporting table overflow (#3808)
+- [ttLib] Apply rounding more often in getCoordinates (#3798)
+- [ttLib] Ignore component bounds if empty (#3799)
+- [ttLib] Change the separator for duplicate glyph names from "#" to "."
+  (#3809)
+- [feaLib] Support subtable breaks in CursivePos, MarkBasePos, MarkToLigPos
+  and MarkToMarkPos lookups (#3800, #3807)
+- [feaLib] If the same lookup has single substitutions and ligature
+  substitutions, upgrade single substitutions to ligature substitutions with
+  one input glyph (#3805)
+- [feaLib] Correctly handle ``<NULL>`` in single pos lookups (#3803)
+- [feaLib] Remove duplicates from class pair pos classes instead of raising
+  an error (#3804)
+- [feaLib] Support creating extension lookups using the ``useExtension``
+  lookup flag instead of silently ignoring it (#3811)
+- [STAT] Add typing for the simpler STAT arguments (#3812)
+- [otlLib.builder] Add future import for annotations (#3814)
+- [cffLib] Fix reading supplement encoding (#3813)
+- [voltLib] Add some missing functionality and fixes to voltLib and
+  VoltToFea, making the conversion to feature files more robust. Also add a
+  `fonttools voltLib` command line tool to compile VOLT sources directly
+  (doing an intermediate fea conversion internally) (#3818)
+- [pens] Add some PointPen annotations (#3820)
+
+4.57.0 (released 2025-04-03)
+----------------------------
+
+- [ttLib.__main__] Add `--no-recalc-timestamp` flag (#3771)
+- [ttLib.__main__] Add `-b` (recalcBBoxes=False) flag (#3772)
+- [cmap] Speed up glyphOrder loading from cmap (#3774)
+- [ttLib.__main__] Improvements around the `-t` flag (#3776)
+- [Debg] Fix parsing from XML; add roundtrip tests (#3781)
+- [feaLib] Support \*Base.MinMax tables (#3783, #3786)
+- [config] Add OPTIMIZE_FONT_SPEED (#3784)
+- [varLib.hvar] New module to add an HVAR table to the font (#3780)
+- [otlLib.optimize] Fix crash when the provided TTF does not contain a
+  `GPOS` (#3794)
+
+4.56.0 (released 2025-02-07)
+----------------------------
+
+- [varStore] Sort the input todo list with the same sorting key used for the
+  optimizer's output (#3767).
+- [otData] Fix DeviceTable's ``DeltaValue`` repeat value which caused a
+  crash after importing from XML and then compiling a GPOS containing Device
+  tables (#3758).
+- [feaLib] Make ``FeatureLibError`` pickleable, so clients can e.g. use
+  feaLib to compile features in parallel with multiprocessing (#3762).
+- [varLib/gvar] Removed workaround for an old, long-fixed macOS bug about
+  composite glyphs with all-zero deltas (#1381, #1788).
+- [Docs] Updated ttLib documentation, beefed up TTFont and TTGlyphSet
+  explanations (#3720).
+
+4.55.8 (released 2025-01-29)
+----------------------------
+
+- [MetaTools] Fixed bug in the buildUCD.py script whereby the first
+  non-header line of some UCD text file was being skipped. This affected in
+  particular the U+00B7 (MIDDLE DOT) entry of ScriptExtensions.txt (#3756).
+
+4.55.7 (released 2025-01-28)
+----------------------------
+
+- Shorten the changelog included in the PyPI package description to
+  accommodate the maximum length limit imposed by Azure DevOps. No actual
+  code changes since v4.55.6 (#3754).
+
+4.55.6 (released 2025-01-24)
+----------------------------
+
+- [glyf] Fixed regression introduced in 4.55.5 when computing bounds of
+  nested composite glyphs with transformed components (#3752).
+
+4.55.5 (released 2025-01-23)
+----------------------------
+
+- [glyf] Fixed recalcBounds of transformed components with unrounded
+  coordinates (#3750).
+- [feaLib] Allow duplicate script/language statements (#3749).
+
+4.55.4 (released 2025-01-21)
+----------------------------
+
+- [bezierTools] Fixed ``splitCubicAtT`` sometimes not returning identical
+  start/end points as a result of numerical precision (#3742, #3743).
+- [feaLib/ast] Fixed docstring of ``AlternateSubstStatement`` (#3735).
+- [transform] Typing fixes (#3734).
+
+4.55.3 (released 2024-12-10)
+----------------------------
+
+- [Docs] fill out ttLib table section (#3716)
+- [feaLib] More efficient inline format 4 lookups (#3726)
+
+4.55.2 (released 2024-12-05)
+----------------------------
+
+- [Docs] update Sphinx config (#3712)
+- [designspaceLib] Allow axisOrdering to be set to zero (#3715)
+- [feaLib] Don’t modify variable anchors in place (#3717)
+
+4.55.1 (released 2024-12-02)
+----------------------------
+
+- [ttGlyphSet] Support VARC CFF2 fonts (#3683)
+- [DecomposedTransform] Document and implement that ``skewY`` is always 0
+  (#3697)
+- [varLib] "Fix" cython iup issue? (#3704)
+- Cython minor refactor (#3705)
+
+
+4.55.0 (released 2024-11-14)
+----------------------------
+
+- [cffLib.specializer] Adjust stack use calculation (#3689)
+- [varLib] Let's not add mac names if the rest of the name table doesn't
+  have them (#3688)
+- [ttLib.reorderGlyphs] Update CFF table charstrings and charset (#3682)
+- [cffLib.specializer] Add cmdline to specialize a CFF2 font (#3675, #3679)
+- [CFF2] Lift uint16 VariationStore.length limitation (#3674)
+- [subset] Consider variation selectors when subsetting cmap format 14
+  (#3672)
+- [varLib.interpolatable] Support CFF2 fonts (#3670)
+- Set isfinal to true in the XML parser for proper resource cleanup (#3669)
+- [removeOverlaps] Fix CFF CharString width (#3659)
+- [glyf] Add optimizeSize option (#3657)
+- Python 3.13 support (#3656)
+- [TupleVariation] Optimize for loading speed, not size (#3650, #3653)
+
+
+4.54.1 (released 2024-09-24)
+----------------------------
+
+- [unicodedata] Update to Unicode 16
+- [subset] Escape ``\\`` in doc string
+
+4.54.0 (released 2024-09-23)
+----------------------------
+
+- [Docs] Small docs cleanups by @n8willis (#3611)
+- [Docs] cleanup code blocks by @n8willis (#3627)
+- [Docs] fix Sphinx builds by @n8willis (#3625)
+- [merge] Minor fixes to documentation for merge by @drj11 (#3588)
+- [subset] Small tweaks to pyftsubset documentation by @RoelN (#3633)
+- [Tests] Do not require the fonttools command to be available by @behdad
+  (#3612)
+- [Tests] subset_test: add failing test to reproduce issue #3616 by
+  @anthrotype (#3622)
+- [ttLib] NameRecordVisitor: include the whole sequence of character
+  variants' UI labels, not just the first by @anthrotype (#3617)
+- [varLib.avar] Reconstruct mappings from binary by @behdad (#3598)
+- [varLib.instancer] Fix visual artefacts with partial L2 instancing by
+  @Hoolean (#3635)
+- [varLib.interpolatable] Support discrete axes in .designspace by @behdad
+  (#3599)
+- [varLib.models] By default, assume OpenType-like normalized space by
+  @behdad (#3601)
+
+4.53.1 (released 2024-07-05)
+----------------------------
+
+- [feaLib] Improve the sharing of inline chained lookups (#3559)
+- [otlLib] Correct the calculation of OS/2.usMaxContext with reversed
+  chaining contextual single substitutions (#3569)
+- [misc.visitor] Visitors search the inheritance chain of objects they are
+  visiting (#3581)
+
+4.53.0 (released 2024-05-31)
+----------------------------
+
+- [ttLib.removeOverlaps] Support CFF table to aid in downconverting CFF2
+  fonts (#3528)
+- [avar] Fix crash when accessing a not-yet-existing attribute (#3550)
+- [docs] Add buildMathTable to otlLib.builder documentation (#3540)
+- [feaLib] Allow UTF-8 with BOM when reading features (#3495)
+- [SVGPathPen] Revert rounding coordinates to two decimal places by default
+  (#3543)
+- [varLib.instancer] Refix output filename decision-making
+  (#3545, #3544, #3548)
+
+4.52.4 (released 2024-05-27)
+----------------------------
+
+- [varLib.cff] Restore and deprecate convertCFFtoCFF2, which was removed in
+  the 4.52.0 release, as it is used by downstream projects (#3535).
+
+4.52.3 (released 2024-05-27)
+----------------------------
+
+- Fixed a small syntax error in the reStructuredText-formatted NEWS.rst file
+  which caused the upload to PyPI to fail for 4.52.2. No other code changes.
+
+4.52.2 (released 2024-05-27)
+----------------------------
+
+- [varLib.interpolatable] Ensure that scipy/numpy output is
+  JSON-serializable (#3522, #3526).
+- [housekeeping] Regenerate table lists, to fix pyinstaller packaging of the
+  new ``VARC`` table (#3531, #3529).
+- [cffLib] Make CFFToCFF2 and CFF2ToCFF more robust (#3521, #3525).
+
+4.52.1 (released 2024-05-24)
+----------------------------
+
+- Fixed a small syntax error in the reStructuredText-formatted NEWS.rst file
+  which caused the upload to PyPI to fail for 4.52.0. No other code changes.
+
+4.52.0 (released 2024-05-24)
+----------------------------
+
+- Added support for the new ``VARC`` (Variable Composite) table that is
+  being proposed for the OpenType spec (#3395). For more info:
+  https://github.com/harfbuzz/boring-expansion-spec/blob/main/VARC.md
+- [ttLib.__main__] Fixed decompiling all tables (90fed08).
+- [feaLib] Don't reference the same lookup index multiple times within the
+  same feature record; it is only applied once anyway (#3520).
+- [cffLib] Moved methods to desubroutinize, remove hints and unused
+  subroutines from the subset module to cffLib (#3517).
+- [varLib.instancer] Added support for partial-instancing CFF2 tables! Also,
+  added a method to down-convert from CFF2 to CFF 1.0, and CLI entry points
+  to convert CFF<->CFF2 (#3506).
+- [subset] Prune unused user name IDs even with --name-IDs='*' (#3410).
+- [ttx] Use GNU-style getopt to intermix options and positional arguments
+  (#3509).
+- [feaLib.variableScalar] Fixed ``value_at_location()`` method (#3491).
+- [psCharStrings] Shorten output of ``encodeFloat`` (#3492).
+- [bezierTools] Fix infinite recursion in ``calcCubicArcLength`` (#3502).
+- [avar2] Implement ``avar2`` support in ``TTFont.getGlyphSet()`` (#3473).
+
+4.51.0 (released 2024-04-05)
+----------------------------
+
+- [ttLib] Optimization when loading aux fields (#3464).
+- [ttFont] Add reorderGlyphs (#3468).
+
+4.50.0 (released 2024-03-15)
+----------------------------
+
+- [pens] Added decomposing filter pens that draw components as regular
+  contours (#3460).
+- [instancer] Drop explicit no-op axes from TupleVariations (#3457).
+- [cu2qu/ufo] Return the set of modified glyph names from fonts_to_quadratic
+  (#3456).
+
+4.49.0 (released 2024-02-15)
+----------------------------
+
+- [otlLib] Add API for building the ``MATH`` table (#3446)
+
+4.48.1 (released 2024-02-06)
+----------------------------
+
+- Fixed uploading wheels to PyPI; no code changes since v4.48.0.
+
+4.48.0 (released 2024-02-06)
+----------------------------
+
+- [varLib] Do not log when there are no OTL tables to be merged.
+- [setup.py] Do not restrict lxml<5 anymore; tests pass just fine with
+  lxml>=5.
+- [feaLib] Remove glyph and class name length restrictions in FEA (#3424).
+- [roundingPens] Added ``transformRoundFunc`` parameter to the rounding pens
+  to allow for custom rounding of the components' transforms (#3426).
+- [feaLib] Keep the declaration order of ligature components within a
+  ligature set, instead of sorting by glyph name (#3429).
+- [feaLib] Fixed ordering of alternates in ``aalt`` lookups, following the
+  declaration order of feature references within the ``aalt`` feature block
+  (#3430).
+- [varLib.instancer] Fixed a bug in the instancer's IUP optimization (#3432).
+- [sbix] Support sbix glyphs with the new graphicType "flip" (#3433).
+- [svgPathPen] Added ``--glyphs`` option to dump the SVG paths for the named
+  glyphs in the font (0572f78).
+- [designspaceLib] Added "description" attribute to ``<mappings>`` and
+  ``<mapping>`` elements, and allow multiple ``<mappings>`` elements to
+  group ``<mapping>`` elements that are logically related (#3435, #3437).
+- [otlLib] Correctly choose the most compact GSUB contextual lookup format
+  (#3439).
+
+4.47.2 (released 2024-01-11)
+----------------------------
+
+Minor release to fix uploading wheels to PyPI.
+
+4.47.1 (released 2024-01-11)
+----------------------------
+
+- [merge] Improve help message and add standard command line options (#3408)
+- [otlLib] Pass ``ttFont`` to ``name.addName`` in ``buildStatTable`` (#3406)
+- [featureVars] Re-use ``FeatureVariationRecord``'s when possible (#3413)
+
+4.47.0 (released 2023-12-18)
+----------------------------
+
+- [varLib.models] New API for VariationModel: ``getMasterScalars`` and
+  ``interpolateFromValuesAndScalars``.
+- [varLib.interpolatable] Various bugfixes and rendering improvements. In
+  particular, add a Summary page in the front, and an Index and
+  Table-of-Contents in the back. Change the page size to Letter.
+- [Docs/designspaceLib] Defined a new ``public.fontInfo`` lib key, not used
+  anywhere yet (#3358).
+
+4.46.0 (released 2023-12-02)
+----------------------------
+
+- [featureVars] Allow registering the same set of substitution rules for
+  multiple features. The ``addFeatureVariations`` function can now take a
+  list of featureTags; similarly, the lib key
+  'com.github.fonttools.varLib.featureVarsFeatureTag' can now take a
+  comma-separated string of feature tags (e.g. "salt,ss01") instead of a
+  single tag (#3360).
+- [featureVars] Don't overwrite GSUB FeatureVariations, but append new
+  records to it for features which are not already there. But raise
+  ``VarLibError`` if the feature tag already has feature variations
+  associated with it (#3363).
+- [varLib] Added ``addGSUBFeatureVariations`` function to add GSUB Feature
+  Variations to an existing variable font from rules defined in a
+  DesignSpace document (#3362).
+- [varLib.interpolatable] Various bugfixes and rendering improvements. In
+  particular, a new test for "underweight" glyphs. The new test reports
+  quite a few false-positives though. Please send feedback.
+
+4.45.1 (released 2023-11-23)
+----------------------------
+
+- [varLib.interpolatable] Various bugfixes and improvements, better
+  reporting, reduced false positives.
+- [ttGlyphSet] Added option to not recalculate glyf bounds (#3348).
+
+4.45.0 (released 2023-11-20)
+----------------------------
+
+- [varLib.interpolatable] Vastly improved algorithms. Also available now are
+  ``--pdf`` and ``--html`` options to generate a PDF or HTML report of the
+  interpolation issues. The PDF/HTML report showcases the problematic
+  masters, the interpolated broken glyph, as well as the proposed fixed
+  version.
+
+4.44.3 (released 2023-11-15)
+----------------------------
+
+- [subset] Only prune codepage ranges for OS/2.version >= 1, ignore
+  otherwise (#3334).
+- [instancer] Ensure hhea vertical metrics stay in sync with OS/2 ones after
+  instancing an MVAR table containing 'hasc', 'hdsc' or 'hlgp' tags (#3297).
+
+4.44.2 (released 2023-11-14)
+----------------------------
+
+- [glyf] Have ``Glyph.recalcBounds`` skip empty components (base glyph with
+  no contours) when computing the bounding box of composite glyphs. This
+  simply restores the existing behavior before some changes were introduced
+  in fonttools 4.44.0 (#3333).
+
+4.44.1 (released 2023-11-14)
+----------------------------
+
+- [feaLib] Ensure variable mark anchors are deep-copied while building,
+  since they get modified in-place and later reused (#3330).
+- [OS/2|subset] Added ``recalcCodePageRanges`` method to the OS/2 table
+  class; added ``--prune-codepage-ranges`` to the `fonttools subset` command
+  (#3328, #2607).
+
+4.44.0 (released 2023-11-03)
+----------------------------
+
+- [instancer] Recalc OS/2 AvgCharWidth after instancing if the default
+  changes (#3317).
+- [otlLib] Make ClassDefBuilder class order match varLib.merger's, i.e.
+  large classes first, then glyph lexicographic order (#3321, #3324).
+- [instancer] Allow not specifying any of the min:default:max values and let
+  them be filled in from fvar's values (#3322, #3323).
+- [instancer] When running --update-name-table, ignore axes that have no
+  STAT axis values (#3318, #3319).
+- [Debg] When dumping to ttx, write the embedded JSON as a multi-line string
+  with indentation (92cbfee0d).
+- [varStore] Handle > 65535 items per encoding by splitting the VarData
+  subtable (#3310).
+- [subset] Handle null-offsets in MarkLigPos subtables.
+- [subset] Keep the East Asian spacing features vhal, halt, chws, vchw by
+  default (#3305).
+- [instancer.solver] Fixed case where axisDef < lower and upper < axisMax
+  (#3304).
+- [glyf] Speed up compilation, mostly around ``recalcBounds`` (#3301).
+- [varLib.interpolatable] Speed it up when working on variable fonts, plus
+  various micro-optimizations (#3300).
+- Require unicodedata2 >= 15.1.0 when installed with the 'unicode' extra; it
+  contains UCD 15.1.
+
+4.43.1 (released 2023-10-06)
+----------------------------
+
+- [EBDT] Fixed TypeError exception in `_reverseBytes` method triggered when
+  dumping some bitmap fonts with the `ttx -z bitwise` option (#3162).
+- [v/hhea] Fixed UnboundLocalError exception in ``recalc`` method when no
+  vmtx or hmtx tables are present (#3290).
+- [bezierTools] Fixed an incorrectly typed cython local variable leading to
+  TypeError when calling ``calcQuadraticArcLength`` (#3288).
+- [feaLib/otlLib] Better error message when building a Coverage table with a
+  missing glyph (#3286).
+
+4.43.0 (released 2023-09-29)
+----------------------------
+
+- [subset] Set up lxml ``XMLParser(resolve_entities=False)`` when parsing
+  OT-SVG documents to prevent XML External Entity (XXE) attacks (9f61271dc):
+  https://codeql.github.com/codeql-query-help/python/py-xxe/
+- [varLib.iup] Added workaround for a Cython bug in ``iup_delta_optimize``
+  that was leading to the IUP tolerance being incorrectly initialised,
+  resulting in sub-optimal deltas (60126435d, cython/cython#5732).
+- [varLib] Added new command-line entry point ``fonttools varLib.avar`` to
+  add an ``avar`` table to an existing VF from axes mappings in a
+  .designspace file (0a3360e52).
+- [instancer] Fixed bug whereby no-longer-used variation regions were not
+  correctly pruned after VarData optimization (#3268).
+- Added support for Python 3.12 (#3283).
+
+4.42.1 (released 2023-08-20)
+----------------------------
+
+- [t1Lib] Fixed several Type 1 issues (#3238, #3240).
+- [otBase/packer] Allow sharing tables reached by different offset sizes
+  (#3241, #3236).
+- [varLib/merger] Fix Cursive attachment merging error when all anchors are
+  NULL (#3248, #3247).
+- [ttLib] Fixed warning when calling ``addMultilingualName`` and the
+  ``ttFont`` parameter was not passed on to ``findMultilingualName``
+  (#3253).
+
+4.42.0 (released 2023-08-02)
+----------------------------
+
+- [varLib] Use sentinel value 0xFFFF to mark a glyph advance in hmtx/vmtx as
+  non-participating, allowing sparse masters to contain glyphs for variation
+  purposes other than {H,V}VAR (#3235).
+- [varLib/cff] Treat empty glyphs in non-default masters as missing, thus
+  not participating in CFF2 delta computation, similarly to how varLib
+  already treats them for gvar (#3234).
+- Added varLib.avarPlanner script to deduce 'correct' avar v1 axis mappings
+  based on glyph average weights (#3223).
+
+4.41.1 (released 2023-07-21)
+----------------------------
+
+- [subset] Fixed perf regression in v4.41.0 by making ``NameRecordVisitor``
+  only visit tables that do contain nameID references (#3213, #3214).
+- [varLib.instancer] Support instancing fonts containing null ConditionSet
+  offsets in FeatureVariationRecords (#3211, #3212).
+- [statisticsPen] Report font glyph-average weight/width and font-wide
+  slant.
+- [fontBuilder] Fixed head.created date incorrectly set to 0 instead of the
+  current timestamp, a regression introduced in v4.40.0 (#3210).
+- [varLib.merger] Support sparse ``CursivePos`` masters (#3209).
+
+4.41.0 (released 2023-07-12)
+----------------------------
+
+- [fontBuilder] Fixed bug in setupOS2 with the default panose attribute
+  incorrectly being set to a dict instead of a Panose object (#3201).
+- [name] Added method to ``removeUnusedNameRecords`` in the user range
+  (#3185).
+- [varLib.instancer] Fixed issue with L4 instancing (moving default)
+  (#3179).
+- [cffLib] Use latin1 so we can roundtrip non-ASCII in
+  {Full,Font,Family}Name (#3202).
+- [designspaceLib] Mark as optional in docs (as it is in the code).
+- [glyf-1] Fixed drawPoints() bug whereby the last cubic segment became
+  quadratic (#3189, #3190).
+- [fontBuilder] Propagate the 'hidden' flag to the fvar Axis instance
+  (#3184).
+- [fontBuilder] Update setupAvar() to also support avar 2, fixing
+  ``_add_avar()`` call site (#3183).
+- Added new ``voltLib.voltToFea`` submodule (originally Tiro Typeworks'
+  "Volto") for converting VOLT OpenType Layout sources to FEA format
+  (#3164).
+
+4.40.0 (released 2023-06-12)
+----------------------------
+
+- Published native binary wheels to PyPI for all the Python minor versions
+  and platforms/architectures currently supported that would benefit from
+  this. They will include precompiled Cython-accelerated modules (e.g.
+  cu2qu) without requiring to compile them from source. The pure-python
+  wheel and source distribution will continue to be published as always (pip
+  will automatically choose them when no binary wheel is available for the
+  given platform, e.g. pypy). Use
+  ``pip install --no-binary=fonttools fonttools`` to explicitly request pip
+  to install from the pure-python source.
+- [designspaceLib|varLib] Add initial support for specifying axis mappings
+  and build the ``avar2`` table from those (#3123).
+- [feaLib] Support variable ligature caret position (#3130).
+- [varLib|glyf] Added option to --drop-implied-oncurves; test for impliable
+  oncurve points either before or after rounding (#3146, #3147, #3155,
+  #3156).
+- [TTGlyphPointPen] Don't error with empty contours, simply ignore them
+  (#3145).
+- [sfnt] Fixed a str vs bytes remnant of the py3 transition in code dealing
+  with de/compiling WOFF metadata (#3129).
+- [instancer-solver] Fixed bug when moving the default instance with sparse
+  masters (#3139, #3140).
+- [feaLib] Simplify variable scalars that don’t vary (#3132).
+- [pens] Added a filter pen that explicitly emits a closing line when
+  lastPt != movePt (#3100).
+- [varStore] Improve the optimize algorithm and better document it (#3124,
+  #3127). Added ``quantization`` option (#3126).
+- Added CI workflow config file for building native binary wheels (#3121).
+- [fontBuilder] Added glyphDataFormat=0 option; raise an error when glyphs
+  contain cubic outlines but glyphDataFormat was not explicitly set to 1
+  (#3113, #3119).
+- [subset] Prune emptied GDEF.MarkGlyphSetsDef and remap indices; ensure
+  GDEF is subsetted before GSUB and GPOS (#3114, #3118).
+- [xmlReader] Fixed issue whereby DSIG table data was incorrectly parsed
+  (#3115, #2614).
+- [varLib/merger] Fixed merging of SinglePos with pos=0 (#3111, #3112).
+- [feaLib] Demote "Feature has not been defined" error to a warning when
+  building aalt and the referenced feature is empty (#3110).
+- [feaLib] Dedupe multiple substitutions with classes (#3105).
+
+4.39.4 (released 2023-05-10)
+----------------------------
+
+- [varLib.interpolatable] Allow for sparse masters (#3075)
+- [merge] Handle differing default/nominalWidthX in CFF (#3070)
+- [ttLib] Add missing main.py file to ttLib package (#3088)
+- [ttx] Fix missing composite instructions in XML (#3092)
+- [ttx] Fix split tables option to work on filenames containing '%' (#3096)
+- [featureVars] Process lookups for features other than rvrn last (#3099)
+- [feaLib] Support multiple substitution with classes (#3103)
+
+4.39.3 (released 2023-03-28)
+----------------------------
+
+- [sbix] Fixed TypeError when compiling empty glyphs whose imageData is
+  None; the regression was introduced in v4.39 (#3059).
+- [ttFont] Fixed AttributeError on python <= 3.10 when opening a TTFont
+  from a tempfile SpooledTemporaryFile, the seekable method only being added
+  on python 3.11 (#3052).
+
+4.39.2 (released 2023-03-16)
+----------------------------
+
+- [varLib] Fixed regression introduced in 4.39.1 whereby an incomplete
+  'STAT' table would be built even though a DesignSpace v5 did contain
+  'STAT' definitions (#3045, #3046).
+
+4.39.1 (released 2023-03-16)
+----------------------------
+
+- [avar2] Added experimental support for reading/writing avar version 2 as
+  specified in this draft proposal:
+  https://github.com/harfbuzz/boring-expansion-spec/blob/main/avar2.md
+- [glifLib] Wrap underlying XML library exceptions with GlifLibError when
+  parsing GLIFs, and also print the name and path of the glyph that fails
+  to be parsed (#3042).
+- [feaLib] Consult avar for normalizing user-space values in ConditionSets
+  and in VariableScalars (#3042, #3043).
+- [ttProgram] Handle string input to Program.fromAssembly() (#3038).
+- [otlLib] Added a config option to emit GPOS 7 lookups, currently disabled
+  by default because of a macOS bug (#3034).
+- [COLRv1] Added method to automatically compute ClipBoxes (#3027).
+- [ttFont] Fixed getGlyphID to raise KeyError on missing glyphs instead of
+  returning None. The regression was introduced in v4.27.0 (#3032).
+- [sbix] Fixed UnboundLocalError: cannot access local variable 'rawdata'
+  (#3031).
+- [varLib] When building a VF, do not overwrite a pre-existing ``STAT``
+  table that was built with feaLib from a FEA feature file. Also, added
+  support for building multiple VFs defined in a Designspace v5 with the
+  ``fonttools varLib`` script (#3024).
+- [mtiLib] Only add the ``Debg`` table with lookup names when the
+  ``FONTTOOLS_LOOKUP_DEBUGGING`` env variable is set (#3023).
+
+4.39.0 (released 2023-03-06)
+----------------------------
+
+- [mtiLib] Optionally add `Debg` debug info for MTI feature builds (#3018).
+- [ttx] Support reading the input file from standard input using the special
+  `-` character, similar to the existing `-o -` option to write output to
+  standard output (#3020).
+- [cython] Prevent ``cython.compiled`` from raising AttributeError if cython
+  is not installed properly (#3017).
+- [OS/2] Guard against ZeroDivisionError when calculating xAvgCharWidth in
+  the unlikely scenario that no glyph has a non-zero advance (#3015).
+- [subset] Recompute xAvgCharWidth independently of
+  --no-prune-unicode-ranges; previously the two options were involuntarily
+  bundled together (#3012).
+- [fontBuilder] Add ``debug`` parameter to the addOpenTypeFeatures method to
+  add source debugging information to the font in the ``Debg`` private
+  table (#3008).
+- [name] Make NameRecord `__lt__` comparison not fail on Unicode encoding
+  errors (#3006).
+- [featureVars] Fixed bug in ``overlayBox`` (#3003, #3005).
+- [glyf] Added experimental support for cubic bezier curves in the TrueType
+  glyf table, as outlined in the glyf v1 proposal (#2988):
+  https://github.com/harfbuzz/boring-expansion-spec/blob/main/glyf1-cubicOutlines.md
+- Added new qu2cu module and related qu2cuPen, the reverse of cu2qu, for
+  converting TrueType quadratic splines to cubic bezier curves (#2993).
+- [glyf] Added experimental support for reading and writing Variable
+  Composites/Components as defined in the glyf v1 spec proposal (#2958):
+  https://github.com/harfbuzz/boring-expansion-spec/blob/main/glyf1-varComposites.md.
+- [pens] Added `addVarComponent` method to pen protocols' base classes,
+  which pens can implement to handle varcomponents (by default they get
+  decomposed) (#2958).
+- [misc.transform] Added DecomposedTransform class which implements an
+  affine transformation with separate translate, rotation, scale, skew, and
+  transformation-center components (#2598).
+- [sbix] Ensure Glyph.referenceGlyphName is set; fixes error after dumping
+  and re-compiling an sbix table with 'dupe' glyphs (#2984).
+- [feaLib] Be cleverer when merging chained single substitutions into the
+  same lookup when they are specified using the inline notation (#2150,
+  #2974).
+- [instancer] Clamp user-inputted axis ranges to those of fvar (#2959).
+- [otBase/subset] Define ``__getstate__`` for BaseTable so that a
+  copied/pickled 'lazy' object gets its own OTTableReader to read from;
+  incidentally fixes a bug while subsetting a COLRv1 table containing
+  ClipBoxes on python 3.11 (#2965, #2968).
+- [sbix] Handle glyphs with "dupe" graphic type correctly on compile
+  (#2963).
+- [glyf] ``endPointsOfContours`` field should be unsigned! Kudos to behdad
+  for spotting one of the oldest bugs in FT. Probably nobody has ever dared
+  to make glyphs with more than 32767 points... (#2957).
+- [feaLib] Fixed handling of ``ignore`` statements with unmarked glyphs to
+  match makeotf behavior, which assumes the first glyph is marked (#2950).
+- Reformatted code with ``black`` and enforce the new code style via CI
+  check (#2925).
+- [feaLib] Sort name table entries in the builder following the order
+  prescribed by the OT spec (#2927).
+- [cu2quPen] Add Cu2QuMultiPen that converts multiple outlines at a time in
+  an interpolation-compatible way; its methods take a list of tuples of the
+  arguments that would normally be passed to individual segment pens, and at
+  the end it dispatches the converted outlines to each pen (#2912).
+- [reverseContourPen/ttGlyphPen] Add outputImpliedClosingLine option (#2913,
+  #2914, #2921, #2922, #2995).
+- [gvar] Avoid expanding all glyphs unnecessarily upon compile (#2918).
+- [scaleUpem] Fixed bug whereby CFF2 vsindex was scaled; it should not be
+  (#2893, #2894).
+- [designspaceLib] Add DS.getAxisByTag and refactor getAxis (#2891).
+- [unicodedata] Map Zmth<->math in ot_tag_{to,from}_script (#1737, #2889).
+- [woff2] Support encoding/decoding OVERLAP_SIMPLE glyf flags (#2576,
+  #2884).
+- [instancer] Update OS/2 class and post.italicAngle when the default is
+  moved (L4).
+- Dropped support for Python 3.7, which reached EOL; fontTools requires
+  3.8+.
+- [instancer] Fixed instantiateFeatureVariations logic when a rule range
+  becomes default-applicable (#2737, #2880).
+- [ttLib] Add main to ttFont and ttCollection that just decompiles and
+  re-compiles the input font (#2869).
+- [featureVars] Insert the 'rvrn' lookup at the beginning of the LookupList,
+  to work around a bug in Apple's implementation of the 'rvrn' feature,
+  which the spec says should be processed early, whereas on macOS 10.15 it
+  follows lookup order (#2140, #2867).
+- [instancer/mutator] Remove the 'DSIG' table if present.
+- [svgPathPen] Don't close the path in endPath(); assume it is open unless
+  closePath() is called (#2089, #2865).
+
+4.38.0 (released 2022-10-21)
+----------------------------
+
+- [varLib.instancer] Added support for L4 instancing, i.e. moving the
+  default value of an axis while keeping it variable. Thanks Behdad! (#2728,
+  #2861).
+  It's now also possible to restrict an axis' min/max values beyond the
+  current default value: e.g. if a font's wght axis has min=100, def=400,
+  max=900 and you want a partial VF that only varies between 500 and 700,
+  you can now do that.
+  You can either specify two min/max values (wght=500:700), and the new
+  default will be set to either the minimum or maximum, depending on which
+  one is closer to the current default (e.g. 500 in this case). Or you can
+  specify three values (e.g. wght=500:600:700) to specify the new default
+  value explicitly.
+- [otlLib/featureVars] Set a few Count values so one doesn't need to compile
+  the font to update them (#2860).
+- [varLib.models] Make extrapolation work for 2-master models as well, where
+  one master is at the default location (#2843, #2846).
+  Add optional extrapolate=False to normalizeLocation() (#2847, #2849).
+- [varLib.cff] Fixed sub-optimal packing of CFF2 deltas by no longer
+  rounding them to integer (#2838).
+- [scaleUpem] Calculate numShorts in VarData after scale; handle CFF
+  hintmasks (#2840).
+
+4.37.4 (released 2022-09-30)
+----------------------------
+
+- [subset] Keep nameIDs used by CPAL palette entry labels (#2837).
+- [varLib] Avoid negative hmtx values when creating a font from a variable
+  CFF2 font (#2827).
+- [instancer] Don't prune stat.ElidedFallbackNameID (#2828).
+- [unicodedata] Update Scripts/Blocks to Unicode 15.0 (#2833).
+
+4.37.3 (released 2022-09-20)
+----------------------------
+
+- Fix arguments in calls to (glyf) glyph.draw() and drawPoints(), whereby
+  offset wasn't correctly passed down; this fix also exposed a second bug,
+  where lsb and tsb were not set (#2824, #2825,
+  adobe-type-tools/afdko#1560).
+
+4.37.2 (released 2022-09-15)
+----------------------------
+
+- [subset] Keep the CPAL table and don't attempt to prune unused color
+  indices if an OT-SVG table is present, even if the COLR table was
+  subsetted away; OT-SVG may be referencing the CPAL table, and for now we
+  assume that's the case (#2814, #2815).
+- [varLib.instancer] Downgrade GPOS/GSUB version if there are no more
+  FeatureVariations after instancing (#2812).
+- [subset] Added ``--no-lazy`` to optionally load fonts eagerly (mostly to
+  ease debugging of table lazy loading, no practical effects) (#2807).
+- [varLib] Avoid building an empty COLR.DeltaSetIndexMap with only identity
+  mappings (#2803).
+- [feaLib] Allow multiple value record types (by promoting to the most
+  general format) within the same PairPos subtable; e.g. this allows
+  variable and non-variable kerning rules to share the same subtable. This
+  also fixes a bug whereby some kerning pairs would become unreachable while
+  shaping because of premature subtable splitting (#2772, #2776).
+- [feaLib] Speed up ``VarScalar`` by caching models for recurring master
+  locations (#2798).
+- [feaLib] Optionally cythonize ``feaLib.lexer``, speeds up parsing FEA a
+  bit (#2799).
+- [designspaceLib] Avoid crash when handling unbounded rule conditions
+  (#2797).
+- [post] Don't crash if ``post`` legacy format 1 is malformed/improperly
+  used (#2786).
+- [gvar] Don't be "lazy" (load all glyph variations up front) when
+  TTFont.lazy=False (#2771).
+- [TTFont] Added ``normalizeLocation`` method to normalize a location dict
+  from the font's defined axes space (also known as "user space") into the
+  normalized (-1..+1) space. It applies the ``avar`` mapping if the font
+  contains an ``avar`` table (#2789).
+- [TTVarGlyphSet] Support drawing glyph instances from a CFF2 variable
+  glyph set (#2784).
+- [fontBuilder] Do not error when building cmap if there are zero code
+  points (#2785).
+- [varLib.plot] Added ability to plot a variation model and a set of
+  accompanying master values corresponding to the model's master locations
+  into a pyplot figure (#2767).
+- [Snippets] Added ``statShape.py`` script to draw the statistical shape of
+  a glyph as an ellipse (requires pycairo) (baecd88).
+- [TTVarGlyphSet] Implement drawPoints natively, avoiding going through
+  SegmentToPointPen (#2778).
+- [TTVarGlyphSet] Fixed bug whereby, when drawing a composite glyph multiple
+  times, its components would shift; an extra copy was needed (#2774).
+
+4.37.1 (released 2022-08-24)
+----------------------------
+
+- [subset] Fixed regression introduced with v4.37.0 while subsetting the
+  VarStore of ``HVAR`` and ``VVAR`` tables, whereby an ``AttributeError:
+  subset_varidxes`` was thrown because an apparently unused import statement
+  (with the side-effect of dynamically binding that ``subset_varidxes``
+  method to the VarStore class) had been accidentally deleted in an
+  unrelated PR (#2679, #2773).
+- [pens] Added ``cairoPen`` (#2678).
+- [gvar] Read ``gvar`` more lazily by not parsing all of the ``glyf`` table
+  (#2771).
+- [ttGlyphSet] Make ``drawPoints(pointPen)`` method work for CFF fonts as
+  well via an adapter pen (#2770).
+
+4.37.0 (released 2022-08-23)
+----------------------------
+
+- [varLib.models] Reverted PR #2717 which added support for "narrow tents"
+  in v4.36.0, as it introduced a regression (#2764, #2765). It will be
+  restored in an upcoming release once we find a solution to the bug.
+- [cff.specializer] Fixed issue in the charstring generalizer with the
+  ``blend`` operator (#2750, #1975).
+- [varLib.models] Added support for extrapolation (#2757).
+- [ttGlyphSet] Ensure the newly added ``_TTVarGlyphSet`` inherits from
+  ``_TTGlyphSet`` to keep backward compatibility with the existing API
+  (#2762).
+- [kern] Allow compiling legacy kern tables with more than 64k entries
+  (d21cfdede).
+- [visitor] Added new visitor API to traverse a tree of objects and dispatch
+  based on the attribute type: cf. ``fontTools.misc.visitor`` and
+  ``fontTools.ttLib.ttVisitor``. Added ``fontTools.ttLib.scaleUpem`` module
+  that uses the latter to change a font's units-per-em and scale all the
+  related fields accordingly (#2718, #2755).
+
+4.36.0 (released 2022-08-17)
+----------------------------
+
+- [varLib.models] Use a simpler model that generates narrower "tents"
+  (regions, master supports) whenever possible: specifically when any two
+  axes that actively "cooperate" (have masters at non-zero positions for
+  both axes) have a complete set of intermediates. The simpler algorithm
+  produces fewer overlapping regions and behaves better with respect to
+  rounding at the peak positions than the generic solver, always matching
+  intermediate masters exactly, instead of maximally 0.5 units off. This may
+  be useful when 100% metrics compatibility is desired (#2218, #2717).
+- [feaLib] Remove warning about ``GDEF`` not being built when explicitly not
+  requested; don't build one unconditionally even when not requested (#2744,
+  also works around #2747).
+- [ttFont] ``TTFont.getGlyphSet`` method now supports selecting a location
+  that represents an instance of a variable font (supports both user-scale
+  and normalized axes coordinates via the ``normalized=False`` parameter).
+  Currently this only works for TrueType-flavored variable fonts (#2738).
+
+4.35.0 (released 2022-08-15)
+----------------------------
+
+- [otData/otConverters] Added support for 'biased' PaintSweepGradient
+  start/end angles to match the latest COLRv1 spec (#2743).
+- [varLib.instancer] Fixed bug in ``_instantiateFeatureVariations`` when at
+  the same time pinning one axis and restricting the range of a subsequent
+  axis; the wrong axis tag was being used in the latter step (as the
+  records' axisIdx was updated in the preceding step but looked up using the
+  old axes order in the following step) (#2733, #2734).
+- [mtiLib] Pad script tags with space when less than 4 characters long
+  (#1727).
+- [merge] Use ``'.'`` instead of ``'#'`` in duplicate glyph names (#2742).
+- [gvar] Added support for lazily loading glyph variations (#2741).
+- [varLib] In ``build_many``, we forgot to pass on the ``colr_layer_reuse``
+  parameter to the ``build`` method (#2730).
+- [svgPathPen] Add a main that prints SVG for input text (6df779fd).
+- [cffLib.width] Fixed off-by-one in optimized values; the previous code
+  didn't match the code block above it (2963fa50).
+- [varLib.interpolatable] Support reading .designspace and .glyphs files
+  (via the optional ``glyphsLib``).
+- Compile some modules with Cython, when available and building/installing
+  fonttools from source: ``varLib.iup`` (35% faster), ``pens.momentsPen``
+  (makes ``varLib.interpolatable`` 3x faster).
+- [feaLib] Allow features to be built for a VF without also building a GDEF
+  table (e.g. only build GSUB); warn when GDEF would be needed but isn't
+  requested (#2705, #2694).
+- [otBase] Fixed ``AttributeError`` when uharfbuzz < 0.23.0 and the 'repack'
+  method is missing (32aa8eaf). Use the new ``uharfbuzz.repack_with_tag``
+  when available (since uharfbuzz>=0.30.0), which enables table-specific
+  optimizations to be performed during repacking (#2724).
+- [statisticsPen] By default report all glyphs (4139d891). Avoid
+  division-by-zero (52b28f90).
+- [feaLib] Added missing required argument to FeatureLibError exception
+  (#2693).
+- [varLib.merge] Fixed error during error reporting (#2689). Fixed undefined
+  ``NotANone`` variable (#2714).
+
+4.34.4 (released 2022-07-07)
+----------------------------
+
+- Fixed typo in varLib/merger.py that caused a NameError when merging COLR
+  glyphs containing more than 255 layers (#2685).
+
+4.34.3 (released 2022-07-07)
+----------------------------
+
+- [designspaceLib] Don't make up bad PS names when there is no STAT data
+  (#2684)
+
+4.34.2 (released 2022-07-06)
+----------------------------
+
+- [varStore/subset] Fixed KeyError exception related to NO_VARIATION_INDEX
+  while subsetting varidxes in GPOS/GDEF (a08140d).
+
+4.34.1 (released 2022-07-06)
+----------------------------
+
+- [instancer] When optimizing the HVAR/VVAR VarStore, pass
+  use_NO_VARIATION_INDEX=False to avoid including NO_VARIATION_INDEX in the
+  AdvWidthMap, RsbMap, LsbMap mappings, which would push the VarIdx width to
+  the maximum (4 bytes), which is not desirable. This also fixes a hard
+  crash when attempting to subset a varfont after it had been partially
+  instanced with use_NO_VARIATION_INDEX=True.
+
+4.34.0 (released 2022-07-06)
+----------------------------
+
+- [instancer] Set RIBBI bits in head and OS/2 tables when cutting instances
+  and the subfamily nameID=2 contains strings like 'Italic' or 'Bold'
+  (#2673).
+- [otTraverse] Added module containing methods for traversing trees of
+  otData tables (#2660).
+- [otTables] Made the DeltaSetIndexMap TTX dump less verbose by omitting
+  no-op entries (#2660).
+- [colorLib.builder] Added option to disable PaintColrLayers's reuse of
+  layers from LayerList (#2660).
+- [varLib] Added support for merging multiple master COLRv1 tables into a
+  variable COLR table (#2660, #2328). Base color glyphs of the same name in
+  different masters must have identical paint graph structure (incl. number
+  of layers, palette indices, number of color line stops, corresponding
+  paint formats at each level of the graph), but can differ in the variable
+  fields (e.g. PaintSolid.Alpha). PaintVar* tables are produced when this
+  happens and a VarStore/DeltaSetIndexMap is added to the variable COLR
+  table. It is possible for non-default masters to be 'sparse', i.e. omit
+  some of the color glyphs present in the default master.
+- [feaLib] Let the Parser set nameIDs 1 through 6 that were previously
+  reserved (#2675).
+- [varLib.varStore] Support NO_VARIATION_INDEX in optimizer and instancer.
+- [feaLib] Show all missing glyphs at once at the end of parsing (#2665).
+- [varLib.iup] Rewrite force-set conditions and limit DP loopback length
+  (#2651). For Noto Sans, IUP time drops from 23s down to 9s, with only a
+  slight size increase in the final font. This basically turns the algorithm
+  from O(n^3) into O(n).
+- [featureVars] Report missing glyphs in substitution rules (#2654).
+- [mutator/instancer] Added CLI flag to --no-recalc-timestamp (#2649).
+- [SVG] Allow individual SVG documents in the SVG OT table to be compressed
+  or uncompressed, and remember that when roundtripping to/from ttx. The
+  SVG.docList is now a list of SVGDocument namedtuple-like dataclasses
+  containing an extra ``compressed`` field, and no longer a bare 3-tuple
+  (#2645).
+- [designspaceLib] Check for descriptor types with hasattr() to allow custom
+  classes that don't inherit the default descriptors (#2634).
+- [subset] Enable sharing across subtables of extension lookups for harfbuzz
+  packing (#2626). Updated how table packing falls back to fontTools from
+  harfbuzz (#2668).
+- [subset] Updated default feature tags following current Harfbuzz (#2637).
+- [svgLib] Fixed regex for real numbers to support e.g. 1e-4 in addition to
+  1.0e-4. Support parsing negative rx, ry on arc commands (#2596, #2611).
+- [subset] Fixed subsetting SinglePosFormat2 when ValueFormat=0 (#2603).
+
+4.33.3 (released 2022-04-26)
+----------------------------
+
+- [designspaceLib] Fixed typo in ``deepcopyExceptFonts`` method, which
+  prevented font references from being transferred (#2600). Fixed another
+  typo in the name of the ``Range`` dataclass's ``__post_init__`` magic
+  method (#2597).
+
+4.33.2 (released 2022-04-22)
+----------------------------
+
+- [otBase] Make logging less verbose when harfbuzz fails to serialize. Do
+  not exit at the first failure but continue attempting to fix the offset
+  overflow error using the pure-python serializer, even when the
+  ``USE_HARFBUZZ_REPACKER`` option was explicitly set to ``True``. This is
+  normal with fonts with relatively large tables, at least until hb.repack
+  implements proper table splitting.
+
+4.33.1 (released 2022-04-22)
+----------------------------
+
+- [otlLib] Put back the ``FONTTOOLS_GPOS_COMPACT_MODE`` environment variable
+  to fix a regression in ufo2ft (and thus fontmake) introduced with v4.33.0
+  (#2592, #2593). This is deprecated and will be removed once ufo2ft gets
+  updated to use the new config setup.
+
+4.33.0 (released 2022-04-21)
+----------------------------
+
+- [OS/2 / merge] Automatically recalculate ``OS/2.xAvgCharWidth`` after
+  merging fonts with ``fontTools.merge`` (#2591, #2538).
+- [misc/config] Added ``fontTools.misc.configTools`` module, a generic
+  configuration system (#2416, #2439).
+  Added ``fontTools.config`` module, a fontTools-specific configuration
+  system using ``configTools`` above.
+  Attached a ``Config`` object to ``TTFont``.
+- [otlLib] Replaced the environment variable for the GPOS compression level
+  with an equivalent option using the new config system.
+- [designspaceLib] Incremented format version to 5.0 (#2436).
+  Added discrete axes, variable fonts, STAT information, either design- or
+  user-space location on instances.
+  Added ``fontTools.designspaceLib.split`` module to split a designspace
+  into sub-spaces that interpolate and that represent the variable fonts
+  listed in the document.
+  Made instance names optional and allow computing them from STAT data
+  instead. Added ``fontTools.designspaceLib.statNames`` module.
+  Allow instances to have the same location as a previously defined STAT
+  label.
+  Deprecated some attributes:
+  ``SourceDescriptor``: ``copyLib``, ``copyInfo``, ``copyGroups``,
+  ``copyFeatures``.
+  ``InstanceDescriptor``: ``kerning``, ``info``; ``glyphs``: use rules or
+  sparse sources.
+  For both, ``location``: use the more explicit designLocation.
+  Note: all are soft deprecations and existing code should keep working.
+  Updated documentation for Python methods and the XML format.
+- [varLib] Added ``build_many`` to build several variable fonts from a
+  single designspace document (#2436).
+  Added ``fontTools.varLib.stat`` module to build STAT tables from a
+  designspace document.
+- [otBase] Try to use the HarfBuzz Repacker for packing GSUB/GPOS tables
+  when the ``uharfbuzz`` python bindings are available (#2552). Disable it
+  by setting the "fontTools.ttLib.tables.otBase:USE_HARFBUZZ_REPACKER"
+  config option to ``False``. If the option is set explicitly to ``True``
+  but ``uharfbuzz`` can't be imported or fails to serialize for any reason,
+  an error will be raised (ImportError or uharfbuzz errors).
+- [CFF/T2] Ensure that ``pen.closePath()`` gets called for CFF2 charstrings
+  (#2577). Handle implicit CFF2 closePath within ``T2OutlineExtractor``
+  (#2580).
+
+4.32.0 (released 2022-04-08)
+----------------------------
+
+- [otlLib] Disable the GPOS7 optimization to work around a bug in Apple
+  CoreText. Always force chaining GPOS8 for now (#2540).
+- [glifLib] Added ``outputImpliedClosingLine=False`` parameter to
+  ``Glyph.draw()``, to control the behaviour of ``PointToSegmentPen``
+  (6b4e2e7).
+- [varLib.interpolatable] Check for wrong contour starting point (#2571).
+- [cffLib] Remove leftover ``GlobalState`` class and fix calls to
+  ``TopDictIndex()`` (#2569, #2570).
+- [instancer] Clear ``AxisValueArray`` if it is empty after instantiating
+  (#2563).
+
+4.31.2 (released 2022-03-22)
+----------------------------
+
+- [varLib] Fix instantiation of GPOS SinglePos values (#2555).
+
+4.31.1 (released 2022-03-18)
+----------------------------
+
+- [subset] Fix subsetting OT-SVG when the glyph id attribute is on the root
+  ``<svg>`` element (#2553).
+
+4.31.0 (released 2022-03-18)
+----------------------------
+
+- [ttCollection] Fixed 'ResourceWarning: unclosed file' warning (#2549).
+- [varLib.merger] Handle merging SinglePos with valueformat=0 (#2550).
+- [ttFont] Update glyf's glyphOrder when calling TTFont.setGlyphOrder()
+  (#2544).
+- [ttFont] Added ``ensureDecompiled`` method to load all tables irrespective
+  of the ``lazy`` attribute (#2551).
+- [otBase] Added ``iterSubTable`` method to iterate over BaseTable's
+  children of type BaseTable; useful for traversing a tree of otTables
+  (#2551).
+
+4.30.0 (released 2022-03-10)
+----------------------------
+
+- [varLib] Added debug logger showing the glyph name for which ``gvar`` is
+  built (#2542).
+- [varLib.errors] Fixed undefined names in ``FoundANone`` and
+  ``UnsupportedFormat`` exceptions (ac4d5611).
+- [otlLib.builder] Added ``windowsNames`` and ``macNames`` (bool) parameters
+  to the ``buildStatTable`` function, so that one can select whether to add
+  only one or both of the two sets (#2528).
+- [t1Lib] Added the ability to recreate the PostScript stream (#2504).
+- [name] Added ``getFirstDebugName`` and
+  ``getBest{Family,SubFamily,Full}Name`` methods (#2526).
+
+4.29.1 (released 2022-02-01)
+----------------------------
+
+- [colorLib] Fixed rounding issue with radial gradient's start/end circles
+  inside one another (#2521).
+- [freetypePen] Handle rotate/skew transform when auto-computing
+  width/height of the buffer; raise PenError when moveTo is missing (#2517).
+
+4.29.0 (released 2022-01-24)
+----------------------------
+
+- [ufoLib] Fixed illegal characters and expanded reserved filenames (#2506).
+- [COLRv1] Don't emit useless PaintColrLayers of length=1 in
+  LayerListBuilder (#2513).
+- [ttx] Removed legacy ``waitForKeyPress`` method on Windows (#2509).
+- [pens] Added FreeTypePen that uses ``freetype-py`` and the pen protocol
+  for rasterizing outline paths (#2494).
+- [unicodedata] Updated the script direction list to Unicode 14.0 (#2484).
+  Bumped unicodedata2 dependency to 14.0 (#2499).
+- [psLib] Fixed type of ``fontName`` in ``suckfont`` (#2496).
+
+4.28.5 (released 2021-12-19)
+----------------------------
+
+- [svgPathPen] Continuation of #2471: make sure all occurrences of ``str()``
+  are now replaced with the user-defined ``ntos`` callable.
+- [merge] Refactored code into submodules, plus several bugfixes and
+  improvements: fixed duplicate-glyph-resolution GSUB-lookup generation
+  code; use tolerance in glyph comparison for empty glyph's width; ignore
+  space of default ignorable glyphs; downgrade duplicates-resolution
+  missing-GSUB from assert to warn; added --drop-tables option (#2473,
+  #2475, #2476).
+
+4.28.4 (released 2021-12-15)
+----------------------------
+
+- [merge] Merge GDEF mark sets in Lookups properly (#2474).
+- [feaLib] Have the ``fontTools feaLib`` script exit with an error code when
+  the build fails (#2459).
+- [svgPathPen] Added ``ntos`` option to customize number formatting (e.g.
+  rounding) (#2471).
+- [subset] Speed up subsetting of large CFF fonts (#2467).
+- [otTables] Speculatively promote lookups to extension to speed up
+  compilation. If the offset to lookup N is too big to fit in a ushort, the
+  offset to lookup N+1 is going to be too big as well, so we promote to
+  extension all lookups from lookup N onwards (#2465).
+
+4.28.3 (released 2021-12-03)
+----------------------------
+
+- [subset] Fixed bug while subsetting the ``COLR`` table, whereby incomplete
+  layer records pointing to missing glyphs were being retained, leading to
+  ``struct.error`` upon compiling. Make it so that the ``glyf`` glyph
+  closure, which follows the ``COLR`` glyph closure, does not influence the
+  ``COLR`` table subsetting (#2461, #2462).
+- [docs] Fully document the ``cmap`` and ``glyf`` tables (#2454, #2457).
+- [colorLib.unbuilder] Fixed CLI by deleting a no-longer-existing parameter
+  (180bb1867).
+
+4.28.2 (released 2021-11-22)
+----------------------------
+
+- [otlLib] Remove duplicates when building coverage (#2433).
+- [docs] Add interrogate configuration (#2443).
+- [docs] Remove comment about missing “start” optional argument to
+  ``calcChecksum`` (#2448).
+- [cu2qu/cli] Adapt to the latest ufoLib2.
+- [subset] Support subsetting the SVG table and remove it from the list of
+  tables dropped by default (#534).
+- [subset] Add ``--pretty-svg`` option to pretty-print SVG table contents
+  (#2452).
+- [merge] Support merging ``CFF`` tables (CID-keyed ``CFF`` is still not
+  supported) (#2447).
+- [merge] Support ``--output-file`` (#2447).
+- [docs] Split table docs into individual pages (#2444).
+- [feaLib] Forbid empty classes (#2446).
+- [docs] Improve documentation for ``fontTools.ttLib.ttFont`` (#2442).
+
+4.28.1 (released 2021-11-08)
+----------------------------
+
+- [subset] Fixed AttributeError while traversing a color glyph's Paint graph
+  when there is no LayerList, which is optional (#2441).
+
+4.28.0 (released 2021-11-05)
+----------------------------
+
+- Dropped support for EOL Python 3.6, require Python 3.7 (#2417).
+- [ufoLib/glifLib] Make filename-clash checks faster by using a set instead
+  of a list (#2422).
+- [subset] Don't crash if the optional ClipList and LayerList are ``None``
+  (empty) (#2424, #2439).
+- [OT-SVG] Removed support for the old deprecated version 1 and embedded
+  color palettes, which were never officially part of the OpenType SVG spec.
+  Upon compile, reuse offsets to SVG documents that are identical (#2430).
+- [feaLib] Added support for Variable Feature File syntax. This is
+  experimental and subject to change until it is finalized in the Adobe FEA
+  spec (#2432).
+- [unicodedata] Update Scripts/ScriptExtensions/Blocks to UnicodeData 14.0 (#2437).
+
+4.27.1 (released 2021-09-23)
+----------------------------
+
+- [otlLib] Fixed error when the chained contextual lookup builder overflows (#2404, #2411).
+- [bezierTools] Fixed two floating-point bugs: one when computing ``t`` for a point
+  lying on an almost horizontal/vertical line; another when computing the intersection
+  point between a curve and a line (#2413).
+
+4.27.0 (released 2021-09-14)
+----------------------------
+
+- [ttLib/otTables] Cleaned up virtual GID handling: allow virtual GIDs in ``Coverage``
+  and ``ClassDef`` readers; removed the unused ``allowVID`` argument from the ``TTFont``
+  constructor, and the ``requireReal`` argument in the ``TTFont.getGlyphID`` method.
+  Make ``TTFont.setGlyphOrder`` clear the reverse glyphOrder map, and assume the
+  ``glyphOrder`` internal attribute is never modified outside setGlyphOrder; added
+  ``TTFont.getGlyphNameMany`` and ``getGlyphIDMany`` (#1536, #1654, #2334, #2398).
+- [py23] Dropped internal use of the ``fontTools.py23`` module to fix deprecation warnings
+  in client code that imports from fontTools (#2234, #2399, #2400).
+- [subset] Fix subsetting COLRv1 clip boxes when the font is loaded lazily (#2408).
+
+4.26.2 (released 2021-08-09)
+----------------------------
+
+- [otTables] Added missing ``CompositeMode.PLUS`` operator (#2390).
+
+4.26.1 (released 2021-08-03)
+----------------------------
+
+- [transform] Added ``transformVector`` and ``transformVectors`` methods to the
+  ``Transform`` class. Similar to ``transformPoint``, but they ignore the translation
+  part (#2386).
+
+4.26.0 (released 2021-08-03)
+----------------------------
+
+- [xmlWriter] Default to ``"\n"`` for ``newlinestr`` instead of platform-specific
+  ``os.linesep`` (#2384).
+- [otData] Define COLRv1 ClipList and ClipBox (#2379).
+- [removeOverlaps/instancer] Added --ignore-overlap-errors option to work around a
+  Skia PathOps.Simplify bug (#2382, #2363, google/fonts#3365).
+- NOTE: This will be the last version to support Python 3.6. FontTools will require
+  Python 3.7 or above from the next release (#2350).
+
+4.25.2 (released 2021-07-26)
+----------------------------
+
+- [COLRv1] Various changes to sync with the latest COLRv1 draft spec. In particular:
+  define COLR.VarIndexMap, remove/inline the ColorIndex struct, add VarIndexBase to
+  ``PaintVar*`` tables (#2372);
+  add reduced-precision specialized transform Paints;
+  define Angle as a fraction of a half circle encoded as F2Dot14;
+  use FWORD (int16) for all Paint center coordinates;
+  change PaintTransform to have an offset to Affine2x3.
+- [ttLib] When importing XML, only set sfntVersion if the font has no reader and is
+  empty (#2376).
+
+4.25.1 (released 2021-07-16)
+----------------------------
+
+- [ttGlyphPen] Fixed bug in ``TTGlyphPointPen``, whereby open contours (i.e. starting
+  with segmentType "move") would throw ``NotImplementedError``. They are now treated
+  as if they are closed, like with the ``TTGlyphPen`` (#2364, #2366).
+
+4.25.0 (released 2021-07-05)
+----------------------------
+
+- [tfmLib] Added new library for parsing TeX Font Metric (TFM) files (#2354).
+- [TupleVariation] Make shared tuples order deterministic on Python < 3.7, where
+  Counter (a subclass of dict) doesn't remember insertion order (#2351, #2353).
+- [otData] Renamed COLRv1 structs to remove the 'v1' suffix and match the updated draft
+  spec: 'LayerV1List' -> 'LayerList', 'BaseGlyphV1List' -> 'BaseGlyphList',
+  'BaseGlyphV1Record' -> 'BaseGlyphPaintRecord' (#2346).
+  Added 8 new ``PaintScale*`` tables: with/without centers, uniform vs non-uniform.
+  Added ``*AroundCenter`` variants to ``PaintRotate`` and ``PaintSkew``: the default
+  versions no longer have centerX/Y, but default to the origin.
+  ``PaintRotate``, ``PaintSkew`` and ``PaintComposite`` formats were re-numbered.
+  NOTE: these are breaking changes; clients using the experimental COLRv1 API will
+  have to be updated (#2348).
+- [pointPens] Allow ``GuessSmoothPointPen`` to accept a tolerance. Fixed call to
+  ``math.atan2`` with x/y parameters inverted. Sync the code with fontPens (#2344).
+- [post] Fixed parsing ``post`` table format 2.0 when it contains extra garbage
+  at the end of the stringData array (#2314).
+- [subset] Drop empty features, unless it is the 'size' feature with a FeatureParams
+  table (#2324).
+- [otlLib] Added ``otlLib.optimize`` module; added GPOS compaction algorithm.
+  The compaction can be run on existing fonts with ``fonttools otlLib.optimize``
+  or using the snippet ``compact_gpos.py``. There's experimental support for
+  compacting fonts at compilation time using an environment variable, but that
+  might be removed later (#2326).
+
+4.24.4 (released 2021-05-25)
+----------------------------
+
+- [subset/instancer] Fixed ``AttributeError`` when instantiating a VF that
+  contains GPOS ValueRecords with ``Device`` tables but without the respective
+  non-Device values (e.g. ``XAdvDevice`` without ``XAdvance``). When not
+  explicitly set, the latter are assumed to be 0 (#2323).
+
+4.24.3 (released 2021-05-20)
+----------------------------
+
+- [otTables] Fixed ``AttributeError`` in methods that split LigatureSubst,
+  MultipleSubst and AlternateSubst subtables when an offset overflow occurs.
+  The ``Format`` attribute was removed in v4.22.0 (#2319).
+
+4.24.2 (released 2021-05-20)
+----------------------------
+
+- [ttGlyphPen] Fixed typing annotation of the TTGlyphPen glyphSet parameter (#2315).
+- Fixed two instances of DeprecationWarning: invalid escape sequence (#2311).
+
+4.24.1 (released 2021-05-20)
+----------------------------
+
+- [subset] Fixed AttributeError when a SinglePos subtable has a None Value
+  (ValueFormat 0) (#2312, #2313).
+
+4.24.0 (released 2021-05-17)
+----------------------------
+
+- [pens] Add ``ttGlyphPen.TTGlyphPointPen`` similar to ``TTGlyphPen`` (#2205).
+
+4.23.1 (released 2021-05-14)
+----------------------------
+
+- [subset] Fix ``KeyError`` after subsetting a ``COLR`` table that initially contains
+  both v0 and v1 color glyphs when the subset only requested v1 glyphs; we were
+  not pruning the v0 portion of the table (#2308).
+- [colorLib] Set the ``LayerV1List`` attribute to ``None`` when empty; it's optional
+  in COLRv1 (#2308).
+
+4.23.0 (released 2021-05-13)
+----------------------------
+
+- [designspaceLib] Allow using ``\\UNC`` absolute paths on Windows (#2299, #2306).
+- [varLib.merger] Fixed bug where ``VarLibMergeError`` was raised with incorrect
+  parameters (#2300).
+- [feaLib] Allow substituting a glyph class with ``NULL`` to delete multiple glyphs
+  (#2303).
+- [glyf] Fixed ``NameError`` exception in ``getPhantomPoints`` (#2295, #2305).
+- [removeOverlaps] Retry pathops.simplify after rounding path coordinates to integers
+  if it fails the first time using floats, to work around a rare and hard-to-debug
+  Skia bug (#2288).
+- [varLib] Added support for building, reading, writing and optimizing 32-bit
+  ``ItemVariationStore`` as used in the COLRv1 table (#2285).
+- [otBase/otConverters] Add array readers/writers for int types (#2285).
+- [feaLib] Allow more than one lookahead glyph/class in contextual positioning with
+  "value at end" (#2293, #2294).
+- [COLRv1] Default varIdx should be 0xFFFFFFFF (#2297, #2298).
+- [pens] Make RecordingPointPen actually pass on identifiers; replace asserts with
+  an explicit ``PenError`` exception (#2284).
+- [mutator] Round lsb for CFF2 fonts as well (#2286).
+
+4.22.1 (released 2021-04-26)
+----------------------------
+
+- [feaLib] Skip references to named lookups if the lookup block definition
+  is empty, similarly to makeotf. This also fixes an ``AttributeError`` while
+  generating the ``aalt`` feature (#2276, #2277).
+- [subset] Fixed bug in the ``--no-hinting`` implementation for Device tables (#2272,
+  #2275). The previous code was always dropping Device tables if no-hinting was
+  requested, but some Device tables (DeltaFormat=0x8000) are also used to encode
+  variation indices and need to be retained.
+- [otBase] Fixed bug in getting the ValueRecordSize when decompiling the ``MVAR``
+  table with ``lazy=True`` (#2273, #2274).
+- [varLib/glyf/gvar] Optimized and simplified the ``GlyphCoordinates`` and
+  ``TupleVariation`` classes, use ``bytearray`` where possible, refactored
+  phantom-points calculations. We measured about 30% speedup in the total time
+  of loading master TTFs, building gvar, and saving (#2261, #2266).
+- [subset] Fixed ``AssertionError`` while pruning unused CPAL palettes when
+  ``0xFFFF`` is present (#2257, #2259).
+
+4.22.0 (released 2021-04-01)
+----------------------------
+
+- [ttLib] Remove .Format from Coverage, ClassDef, SingleSubst, LigatureSubst,
+  AlternateSubst, MultipleSubst (#2238).
+  ATTENTION: This will change your TTX dumps!
+- [misc.arrayTools] Move Vector to its own submodule, and rewrite it as a tuple
+  subclass (#2201).
+- [docs] Added a terminology section for varLib (#2209).
+- [varLib] Move rounding to VariationModel, to avoid error accumulation from
+  multiple deltas (#2214).
+- [varLib] Explain merge errors in more human-friendly terms (#2223, #2226).
+- [otlLib] Correct some documentation (#2225).
+- [varLib/otlLib] Allow merging into a variable font without first saving GPOS
+  PairPos2 (#2229).
+- [subset] Improve PairPosFormat2 subsetting (#2221).
+- [ttLib] TTFont.save: create the file on disk as late as possible (#2253).
+- [cffLib] Add missing CFF2 dict operators LanguageGroup and ExpansionFactor
+  (#2249).
+  ATTENTION: This will change your TTX dumps!
+
+4.21.1 (released 2021-02-26)
+----------------------------
+
+- [pens] Reverted breaking change that turned ``AbstractPen`` and ``AbstractPointPen``
+  into abstract base classes (#2164, #2198).
+
+4.21.0 (released 2021-02-26)
+----------------------------
+
+- [feaLib] Indent anchor statements in ``asFea()`` to make them more legible and
+  diff-able (#2193).
+- [pens] Turn ``AbstractPen`` and ``AbstractPointPen`` into abstract base classes
+  (#2164).
+- [feaLib] Added support for parsing and building the ``STAT`` table from AFDKO feature
+  files (#2039).
+- [instancer] Added option to update the name table of a generated instance using the
+  ``STAT`` table's axis values (#2189).
+- [bezierTools] Added functions to compute bezier point-at-time, as well as line-line,
+  curve-line and curve-curve intersections (#2192).
+
+4.20.0 (released 2021-02-15)
+----------------------------
+
+- [COLRv1] Added ``unbuildColrV1`` to deconstruct COLRv1 otTables to a raw JSON-able
+  data structure; it does the reverse of ``buildColrV1`` (#2171).
+- [feaLib] Allow a ``sub X by NULL`` sequence to delete a glyph (#2170).
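+
+  A minimal sketch of this deletion syntax (an editor's example, not from the
+  release notes; the font path and glyph name are hypothetical, and
+  ``addOpenTypeFeaturesFromString`` is feaLib's regular build entry point)::
+
+      from fontTools.ttLib import TTFont
+      from fontTools.feaLib.builder import addOpenTypeFeaturesFromString
+
+      font = TTFont("MyFont.ttf")  # hypothetical input font containing 'comma'
+      fea = """
+      feature ss01 {
+          sub comma by NULL;  # deletes 'comma' when the feature applies
+      } ss01;
+      """
+      addOpenTypeFeaturesFromString(font, fea)
+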
+- [arrayTools] Fixed ``Vector`` division (#2173).
+- [COLRv1] Define new ``PaintSweepGradient`` (#2172).
+- [otTables] Moved the ``Paint.Format`` enum class outside of the ``Paint`` class
+  definition, now named ``PaintFormat``. It was clashing with the paint instance
+  ``Format`` attribute and thus was breaking lazy load of the COLR table, which
+  relies on the magic ``__getattr__`` (#2175).
+- [COLRv1] Replace hand-coded builder functions with otData-driven dynamic
+  implementation (#2181).
+- [COLRv1] Define additional static (non-variable) Paint formats (#2181).
+- [subset] Added support for subsetting COLR v1 and CPAL tables (#2174, #2177).
+- [fontBuilder] Allow ``setupFvar`` to optionally take ``designspaceLib.AxisDescriptor``
+  objects. Added new ``setupAvar`` method. Support localised names for axes and
+  named instances (#2185).
+
+4.19.1 (released 2021-01-28)
+----------------------------
+
+- [woff2] An initial off-curve point with an overlap flag now stays an off-curve
+  point after compression.
+
+4.19.0 (released 2021-01-25)
+----------------------------
+
+- [codecs] Handle ``errors`` parameter values different from 'strict' for the custom
+  extended mac encodings (#2137, #2132).
+- [featureVars] Raise a better error message when a script is missing the required
+  default language system (#2154).
+- [COLRv1] Avoid abrupt change caused by rounding ``PaintRadialGradient.c0`` when
+  the start circle almost touches the end circle's perimeter (#2148).
+- [COLRv1] Support building unlimited lists of paints as 255-ary trees of
+  ``PaintColrLayers`` tables (#2153).
+- [subset] Prune redundant format-12 cmap subtables when all non-BMP characters
+  are dropped (#2146).
+- [basePen] Raise ``MissingComponentError`` instead of a bare ``KeyError`` when a
+  referenced component is missing (#2145).
+
+4.18.2 (released 2020-12-16)
+----------------------------
+
+- [COLRv1] Implemented ``PaintTranslate`` paint format (#2129).
+- [varLib.cff] Fixed unbound local variable error (#1787).
+- [otlLib] Don't crash when creating OpenType class definitions if some glyphs
+  occur more than once (#2125).
+
+4.18.1 (released 2020-12-09)
+----------------------------
+
+- [colorLib] Speed optimization for ``LayerV1ListBuilder`` (#2119).
+- [mutator] Fixed missing tab in ``interpolate_cff2_metrics`` (0957dc7a).
+
+4.18.0 (released 2020-12-04)
+----------------------------
+
+- [COLRv1] Update to latest draft: added ``PaintRotate`` and ``PaintSkew`` (#2118).
+- [woff2] Support new ``brotlicffi`` bindings for PyPy (#2117).
+- [glifLib] Added ``expectContentsFile`` parameter to ``GlyphSet``, for use when
+  reading existing UFOs, to comply with the specification stating that a
+  ``contents.plist`` file must exist in a glyph set (#2114); a usage sketch follows
+  at the end of this section.
+- [subset] Allow ``LangSys`` tags in the ``--layout-scripts`` option (#2112). For example:
+  ``--layout-scripts=arab.dflt,arab.URD,latn``; this will keep ``DefaultLangSys``
+  and the ``URD`` language for the ``arab`` script, and all languages for the ``latn``
+  script.
+- [varLib.interpolatable] Allow UFOs to be checked; report open paths and non-existent
+  glyphs; add a ``--json`` option to produce a machine-readable list of
+  incompatibilities.
+- [pens] Added ``QuartzPen`` to create ``CGPath`` from glyph outlines on macOS.
+  Requires pyobjc (#2107).
+- [feaLib] You can export ``FONTTOOLS_LOOKUP_DEBUGGING=1`` to enable feature-file
+  debugging info stored in the ``Debg`` table (#2106).
+- [otlLib] Build more efficient format 1 and format 2 contextual lookups whenever
+  possible (#2101).
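+
+The following editor's sketch (not part of the original notes; the UFO path is
+hypothetical) shows the ``expectContentsFile`` guard from the 4.18.0 entry above::
+
+    from fontTools.ufoLib.glifLib import GlyphSet
+
+    # With expectContentsFile=True, a missing glyphs/contents.plist raises
+    # GlifLibError instead of being treated as an empty glyph set.
+    glyphs = GlyphSet("MyFont.ufo/glyphs", expectContentsFile=True)
+    print(sorted(glyphs.contents))  # glyph name -> .glif file name mapping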
+
+4.17.1 (released 2020-11-16)
+----------------------------
+
+- [colorLib] Fixed regression in 4.17.0 when building the COLR v0 table; when color
+  layers are stored in the UFO lib plist, we can't distinguish tuples from lists, so
+  we need to accept either type (e5439eb9, googlefonts/ufo2ft/issues#426).
+
+4.17.0 (released 2020-11-12)
+----------------------------
+
+- [colorLib/otData] Updated to latest draft ``COLR`` v1 spec (#2092).
+- [svgLib] Fixed parsing error when arc commands' boolean flags are not separated
+  by space or comma (#2094).
+- [varLib] Interpret empty non-default glyphs as 'missing', if the default glyph is
+  not empty (#2082).
+- [feaLib.builder] Only stash lookup location for ``Debg`` if ``Builder.buildLookups_``
+  has cooperated (#2065, #2067).
+- [varLib] Fixed bug in the VarStore optimizer (#2073, #2083).
+- [varLib] Add designspace lib key for custom feavar feature tag (#2080).
+- Add HashPointPen, adapted from psautohint. With this pen, a hash value of a glyph
+  can be computed, which can later be used to detect glyph changes (#2005).
+
+4.16.1 (released 2020-10-05)
+----------------------------
+
+- [varLib.instancer] Fixed ``TypeError`` exception when instantiating a VF with
+  a GSUB table 1.1 in which the ``FeatureVariations`` attribute is present but set to
+  ``None`` -- indicating that the optional ``FeatureVariations`` is missing (#2077).
+- [glifLib] Make the ``x`` and ``y`` attributes of the ``point`` element required
+  even when validation is turned off, and raise a meaningful ``GlifLibError``
+  message when that happens (#2075).
+
+4.16.0 (released 2020-09-30)
+----------------------------
+
+- [removeOverlaps] Added new module and ``removeOverlaps`` function that merges
+  overlapping contours and components in TrueType glyphs. It requires the
+  ``skia-pathops`` module.
+  Note that removing overlaps invalidates the TrueType hinting (#2068).
+- [varLib.instancer] Added ``--remove-overlaps`` command-line option.
+  The ``overlap`` option in ``instantiateVariableFont`` now takes an ``OverlapMode``
+  enum: 0: KEEP_AND_DONT_SET_FLAGS, 1: KEEP_AND_SET_FLAGS (default), and 2: REMOVE.
+  The latter is equivalent to calling ``removeOverlaps`` on the generated static
+  instance. The option continues to accept ``bool`` values for backward compatibility.
+
+
+4.15.0 (released 2020-09-21)
+----------------------------
+
+- [plistlib] Added typing annotations to the plistlib module. Set up the mypy static
+  type checker to run automatically on CI (#2061).
+- [ttLib] Implement the private ``Debg`` table, a reverse-DNS namespaced JSON dict.
+- [feaLib] Optionally add an entry into the ``Debg`` table with the original
+  lookup name (if any), feature name / script / language combination (if any),
+  and original source filename and line location. Annotate the ttx output for
+  a lookup with the information from the Debg table (#2052).
+- [sfnt] Disabled checksum checking by default in ``SFNTReader`` (#2058).
+- [Docs] Document the ``mtiLib`` module (#2027).
+- [varLib.interpolatable] Added checks for contour node count and the operation type
+  of each node (#2054).
+- [ttLib] Added API to register custom table packer/unpacker classes (#2055).
+
+4.14.0 (released 2020-08-19)
+----------------------------
+
+- [feaLib] Allow anonymous classes in LookupFlags definitions (#2037).
+- [Docs] Better document DesignSpace rules processing order (#2041).
+- [ttLib] Fixed 21-year-old bug in ``maxp.maxComponentDepth`` calculation (#2044,
+  #2045).
+- [varLib.models] Fixed misspelled argument name in the CLI entry point (81d0042a).
+- [subset] When subsetting GSUB v1.1, fixed TypeError by checking whether the
+  optional FeatureVariations table is present (e63ecc5b).
+- [Snippets] Added snippet to show how to decompose glyphs in a TTF (#2030).
+- [otlLib] Generate GSUB type 5 and GPOS type 7 contextual lookups where appropriate
+  (#2016).
+
+4.13.0 (released 2020-07-10)
+----------------------------
+
+- [feaLib/otlLib] Moved lookup subtable builders from feaLib to otlLib; refactored
+  some common code (#2004, #2007).
+- [docs] Document the otlLib module (#2009).
+- [glifLib] Fixed bug with some UFO .glif filenames clashing on case-insensitive
+  filesystems (#2001, #2002).
+- [colorLib] Updated COLRv1 implementation following changes in the draft spec
+  (#2008, googlefonts/colr-gradients-spec#24).
+
+4.12.1 (released 2020-06-16)
+----------------------------
+
+- [_n_a_m_e] Fixed error in ``addMultilingualName`` with one-character names.
+  Only attempt to recover malformed UTF-16 data from a ``bytes`` string,
+  not from a unicode ``str`` (#1997, #1998).
+
+4.12.0 (released 2020-06-09)
+----------------------------
+
+- [otlLib/varLib] Ensure that the ``AxisNameID`` in the ``STAT`` and ``fvar``
+  tables is greater than 255, as per the OpenType spec (#1985, #1986).
+- [docs] Document more modules in the ``fontTools.misc`` package: ``filenames``,
+  ``fixedTools``, ``intTools``, ``loggingTools``, ``macCreatorType``, ``macRes``,
+  ``plistlib`` (#1981).
+- [OS/2] Don't calculate whole sets of Unicode codepoints; use faster and more
+  memory-efficient ranges and bisect lookups (#1984).
+- [voltLib] Support writing back the abstract syntax tree as VOLT data (#1983).
+- [voltLib] Accept DO_NOT_TOUCH_CMAP keyword (#1987).
+- [subset/merge] Fixed a namespace clash involving a private helper class (#1955).
+
+4.11.0 (released 2020-05-28)
+----------------------------
+
+- [feaLib] Introduced ``includeDir`` parameter on Parser and IncludingLexer to
+  explicitly specify the directory to search when ``include()`` statements are
+  encountered (#1973).
+- [ufoLib] Silently delete duplicate glyphs within the same kerning group when reading
+  groups (#1970).
+- [ttLib] Set version of the COLR table when decompiling COLRv1 (commit 9d8a7e2).
+
+4.10.2 (released 2020-05-20)
+----------------------------
+
+- [sfnt] Fixed ``NameError: SimpleNamespace`` while reading the TTC header. The
+  regression was introduced in 4.10.1 after removing the ``py23`` star import.
+
+4.10.1 (released 2020-05-19)
+----------------------------
+
+- [sfnt] Make ``SFNTReader`` pickleable even when the TTFont is loaded with the
+  lazy=True option and thus keeps a reference to an external file (#1962, #1967).
+- [feaLib.ast] Restore backward compatibility (broken in 4.10 with #1905) for
+  ``ChainContextPosStatement`` and ``ChainContextSubstStatement`` classes.
+  Make them accept either a list of lookups or a list of lists of lookups (#1961).
+- [docs] Document some modules in the ``fontTools.misc`` package: ``arrayTools``,
+  ``bezierTools``, ``cliTools`` and ``eexec`` (#1956).
+- [ttLib._n_a_m_e] Fixed ``findMultilingualName()`` when the name record's ``string``
+  is encoded as a bytes sequence (#1963).
+
+4.10.0 (released 2020-05-15)
+----------------------------
+
+- [varLib] Allow feature variations to be active across the entire space (#1957).
+- [ufoLib] Added support for ``formatVersionMinor`` in UFO's ``fontinfo.plist`` and for
+  the ``formatMinor`` attribute in GLIF files, as discussed in unified-font-object/ufo-spec#78.
+  No changes in reading or writing UFOs until an upcoming (non-0) minor update of the
+  UFO specification is published (#1786).
+- [merge] Fixed merging fonts with different versions of the ``OS/2`` table (#1865, #1952).
+- [subset] Fixed ``AttributeError`` while subsetting ``ContextSubst`` and ``ContextPos``
+  Format 3 subtables (#1879, #1944).
+- [ttLib.table._m_e_t_a] If the data happens to be ASCII, emit a comment in TTX (#1938).
+- [feaLib] Support multiple lookups per glyph position (#1905).
+- [psCharStrings] Use inheritance to avoid repeated code in the initializer (#1932).
+- [Doc] Improved documentation for the following modules: ``afmLib`` (#1933), ``agl``
+  (#1934), ``cffLib`` (#1935), ``cu2qu`` (#1937), ``encodings`` (#1940), ``feaLib``
+  (#1941), ``merge`` (#1949).
+- [Doc] Split off developer-centric info to a new page, making the front page of the
+  docs more user-focused. List all utilities and sub-modules with brief descriptions.
+  Make the README more concise and focused (#1914).
+- [otlLib] Add function to build the STAT table from a high-level description (#1926).
+- [ttLib._n_a_m_e] Add ``findMultilingualName()`` method (#1921).
+- [unicodedata] Update ``RTL_SCRIPTS`` for Unicode 13.0 (#1925).
+- [gvar] Sort ``gvar`` XML output by glyph name, not glyph order (#1907, #1908).
+- [Doc] Added help options to the ``fonttools`` command-line tool (#1913, #1920).
+  Ensure all fonttools CLI tools have help documentation (#1948).
+- [ufoLib] Only write fontinfo.plist when there actually is content (#1911).
+
+4.9.0 (released 2020-04-29)
+---------------------------
+
+- [subset] Fixed subsetting of the FeatureVariations table. The subsetter no longer drops
+  FeatureVariationRecords that have empty substitutions, as that would keep the search
+  going and thus change the logic. It will only drop empty records that occur at the
+  end of the FeatureVariationRecords array (#1881).
+- [subset] Remove the FeatureVariations table and downgrade GSUB/GPOS to version 0x10000
+  when FeatureVariations contain no FeatureVariationRecords after subsetting (#1903).
+- [agl] Add support for the legacy Adobe Glyph List of glyph names in ``fontTools.agl``
+  (#1895).
+- [feaLib] Ignore superfluous script statements (#1883).
+- [feaLib] Hide traceback by default on the ``fonttools feaLib`` command line.
+  Use the ``--traceback`` option to show it (#1898).
+- [feaLib] Check lookup index in chaining sub/pos lookups and print a better error
+  message (#1896, #1897).
+- [feaLib] Fix building chained alt substitutions (#1902).
+- [Doc] Included all fontTools modules in the sphinx-generated documentation, and
+  published it to ReadTheDocs for continuous documentation of the fontTools project
+  (#1333). Check it out at https://fonttools.readthedocs.io/. Thanks to Chris Simpkins!
+- [transform] The ``Transform`` class is now a subclass of ``typing.NamedTuple``. No
+  change in functionality (#1904).
+
+
+4.8.1 (released 2020-04-17)
+---------------------------
+
+- [feaLib] Fixed ``AttributeError: 'NoneType' has no attribute 'getAlternateGlyphs'``
+  when the ``aalt`` feature references a chain contextual substitution lookup
+  (googlefonts/fontmake#648, #1878).
+
+4.8.0 (released 2020-04-16)
+---------------------------
+
+- [feaLib] If the Parser is initialized without a ``glyphNames`` parameter, it cannot
+  distinguish between a glyph name containing a hyphen and a range of glyph names;
+  instead of raising an error, it now interprets them as literal glyph names, while
+  also outputting a logging warning to alert the user about the ambiguity (#1768, #1870).
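+
+  An editor's sketch of the disambiguation (the file name and glyph names are
+  hypothetical)::
+
+      from fontTools.feaLib.parser import Parser
+
+      # With an explicit glyph set, "f-i" is recognized as a literal glyph
+      # name rather than the range "f"-"i"; without one, the parser assumes
+      # literal names and logs a warning.
+      ast = Parser("features.fea", glyphNames={"f", "i", "f-i"}).parse()
+      print(ast.asFea())
+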
+- [feaLib] When serializing AST to string, emit spaces around hyphens that denote
+  ranges. Also, fixed an issue with CID ranges when round-tripping AST->string->AST
+  (#1872).
+- [Snippets/otf2ttf] In the otf2ttf.py script, update LSB in hmtx to match xMin (#1873).
+- [colorLib] Added experimental support for building ``COLR`` v1 tables as per the
+  colr-gradients-spec draft proposal. **NOTE**: both the API and the XML dump of
+  ``COLR`` v1 are susceptible to change while the proposal is being discussed and
+  formalized (#1822).
+
+4.7.0 (released 2020-04-03)
+---------------------------
+
+- [cu2qu] Added ``fontTools.cu2qu`` package, imported from the original cu2qu
+  project. The ``cu2qu.pens`` module was moved to ``fontTools.pens.cu2quPen``.
+  The optional cu2qu extension module can be compiled by installing Cython before
+  installing fonttools from source (i.e. git repo or sdist tarball). The wheel
+  package that is published on PyPI (i.e. the one ``pip`` downloads, unless the
+  ``--no-binary`` option is used) will continue to be pure-Python for now (#1868).
+
+4.6.0 (released 2020-03-24)
+---------------------------
+
+- [varLib] Added support for building variable ``BASE`` table version 1.1 (#1858).
+- [CPAL] Added ``fromRGBA`` method to the ``Color`` class (#1861).
+
+
+4.5.0 (released 2020-03-20)
+---------------------------
+
+- [designspaceLib] Added ``add{Axis,Source,Instance,Rule}Descriptor`` methods to the
+  ``DesignSpaceDocument`` class, to initialize new descriptor objects using keyword
+  arguments and, at the same time, append them to the current document (#1860).
+- [unicodedata] Update to Unicode 13.0 (#1859).
+
+4.4.3 (released 2020-03-13)
+---------------------------
+
+- [varLib] Always build the ``gvar`` table for TrueType-flavored Variable Fonts,
+  even if it contains no variation data. The table is required according to
+  the OpenType spec (#1855, #1857).
+
+4.4.2 (released 2020-03-12)
+---------------------------
+
+- [ttx] Annotate ``LookupFlag`` in the XML dump with a comment explaining what bits
+  are set and what they mean (#1850).
+- [feaLib] Added a more descriptive message to the ``IncludedFeaNotFound`` error (#1842).
+
+4.4.1 (released 2020-02-26)
+---------------------------
+
+- [woff2] Skip normalizing ``glyf`` and ``loca`` tables if these are missing from
+  a font (e.g. in NotoColorEmoji using ``CBDT/CBLC`` tables).
+- [timeTools] Use non-localized date parsing in ``timestampFromString``, to fix an
+  error when a non-English ``LC_TIME`` locale is set (#1838, #1839).
+- [fontBuilder] Make sure the CFF table generated by fontBuilder can be used by varLib
+  without having to compile and decompile the table first. This was breaking when
+  converting the CFF table to CFF2 due to some unset attributes (#1836).
+
+4.4.0 (released 2020-02-18)
+---------------------------
+
+- [colorLib] Added ``fontTools.colorLib.builder`` module, initially with ``buildCOLR``
+  and ``buildCPAL`` public functions. More color font formats will follow (#1827).
+- [fontBuilder] Added ``setupCOLR`` and ``setupCPAL`` methods (#1826).
+- [ttGlyphPen] Quantize ``GlyphComponent.transform`` floats to ``F2Dot14`` to fix a
+  round-trip issue when computing bounding boxes of transformed components (#1830).
+- [glyf] If a component uses reference points (``firstPt`` and ``secondPt``) for
+  alignment (instead of X and Y offsets), compute the effective translation offset
+  *after* having applied any transform (#1831).
+- [glyf] When all glyphs have zero contours, compile the ``glyf`` table data as a single
+  null byte in order to pass validation by OTS and Windows (#1829).
+- [feaLib] Parsing feature code now ensures that referenced glyph names are part of
+  the known glyph set, unless no glyph set was provided.
+- [varLib] When filling in the default axis value for a missing location of a source or
+  instance, correctly map the value forward.
+- [varLib] The avar table can now contain mapping output values that are greater than
+  or equal to the preceding value, as the avar specification allows this.
+- [varLib] The errors of the module are now ordered hierarchically below VarLibError.
+  See #1821.
+
+4.3.0 (released 2020-02-03)
+---------------------------
+
+- [EBLC/CBLC] Fixed incorrect padding length calculation for Format 3 IndexSubTable
+  (#1817, #1818).
+- [varLib] Fixed error when merging OTL tables and TTFonts were loaded as ``lazy=True``
+  (#1808, #1809).
+- [varLib] Allow using master fonts containing a ``CFF2`` table when building a VF (#1816).
+- [ttLib] Make the ``recalcBBoxes`` option work also with the ``CFF2`` table (#1816).
+- [feaLib] Don't reset ``lookupflag`` in lookups defined inside feature blocks.
+  They will now inherit the current ``lookupflag`` of the feature. This is what
+  Adobe ``makeotf`` also does in this case (#1815).
+- [feaLib] Fixed bug with mixed single/multiple substitutions. If a single substitution
+  involved a glyph class, we were incorrectly using only the first glyph in the class
+  (#1814).
+
+4.2.5 (released 2020-01-29)
+---------------------------
+
+- [feaLib] Do not fail on duplicate multiple substitutions, only warn (#1811).
+- [subset] Optimize SinglePos subtables to Format 1 if all ValueRecords are the same
+  (#1802).
+
+4.2.4 (released 2020-01-09)
+---------------------------
+
+- [unicodedata] Update RTL_SCRIPTS for Unicode 11 and 12.
+
+4.2.3 (released 2020-01-07)
+---------------------------
+
+- [otTables] Fixed bug when splitting ``MarkBasePos`` subtables as offsets overflow.
+  The mark class values in the split subtable were not being updated, leading to
+  invalid mark-base attachments (#1797, googlefonts/noto-source#145).
+- [feaLib] Only log a warning instead of an error when features contain duplicate
+  substitutions (#1767).
+- [glifLib] Strip XML comments when parsing with lxml (#1784, #1785).
+
+4.2.2 (released 2019-12-12)
+---------------------------
+
+- [subset] Fixed issue with subsetting the FeatureVariations table when the index
+  of features changes as features get dropped. The feature index needs to be
+  remapped to point to the index of the remaining features (#1777, #1782).
+- [fontBuilder] Added ``addFeatureVariations`` method to the ``FontBuilder`` class. This
+  is a shorthand for calling ``featureVars.addFeatureVariations`` on the builder's
+  TTFont object (#1781).
+- [glyf] Fixed the flags bug in glyph.drawPoints(), like we did for glyph.draw()
+  (#1771, #1774).
+
+4.2.1 (released 2019-12-06)
+---------------------------
+
+- [glyf] Use the ``flagOnCurve`` bit mask in ``glyph.draw()``, so that we ignore
+  the ``overlap`` flag that may be set when instantiating variable fonts (#1771).
+
+4.2.0 (released 2019-11-28)
+---------------------------
+
+- [pens] Added the following pens:
+
+  * ``roundingPen.RoundingPen``: filter pen that rounds coordinates and components'
+    offsets to integers;
+  * ``roundingPen.RoundingPointPen``: like the above, but using the PointPen protocol;
+  * ``filterPen.FilterPointPen``: base class for filter point pens;
+  * ``transformPen.TransformPointPen``: filter point pen to apply an affine transform;
+  * ``recordingPen.RecordingPointPen``: records and replays point-pen commands.
+
+- [ttGlyphPen] Always round float coordinates and component offsets to integers
+  (#1763).
+- [ufoLib] When converting kerning groups from UFO2 to UFO3, avoid confusing
+  groups with the same name as one of the glyphs (#1761, #1762,
+  unified-font-object/ufo-spec#98).
+
+4.1.0 (released 2019-11-18)
+---------------------------
+
+- [instancer] Implemented restricting axis ranges (level 3 partial instancing).
+  You can now pass ``{axis_tag: (min, max)}`` tuples as input to the
+  ``instantiateVariableFont`` function. Note that changing the default axis
+  position is not supported yet. The command-line script also accepts axis ranges
+  in the form of colon-separated float values, e.g. ``wght=400:700`` (#1753, #1537).
+- [instancer] Never drop STAT ``DesignAxis`` records, but only prune out-of-range
+  ``AxisValue`` records.
+- [otBase/otTables] Enforce that VarStore.RegionAxisCount == fvar.axisCount, even
+  when the regions list is empty, to appease OTS < v8.0 (#1752).
+- [designspaceLib] Defined new ``processing`` attribute for the ``<rules>`` element,
+  with values "first" or "last", plus other editorial changes to the DesignSpace
+  specification. Bumped the format version to 4.1 (#1750).
+- [varLib] Improved error message when masters' glyph orders do not match (#1758,
+  #1759).
+- [featureVars] Allow specifying a custom feature tag in ``addFeatureVariations``;
+  allow said feature to already exist, in which case we append new lookup indices
+  to existing features. Implemented the ``<rules>`` attribute ``processing`` according
+  to the DesignSpace specification update in #1750. Depending on this flag, we generate
+  either an 'rvrn' (always processed first) or an 'rclt' feature (follows lookup order,
+  therefore last) (#1747, #1625, #1371).
+- [ttCollection] Added support for context-manager auto-closing via the ``with``
+  statement, like with ``TTFont`` (#1751).
+- [unicodedata] Require unicodedata2 >= 12.1.0.
+- [py2.py3] Removed yet more PY2 vestiges (#1743).
+- [_n_a_m_e] Fixed issue when comparing NameRecords with different string types (#1742).
+- [fixedTools] Changed ``fixedToFloat`` to not do any rounding but simply return
+  ``value / (1 << precisionBits)``. Added ``floatToFixedToStr`` and
+  ``strToFixedToFloat`` functions to be used when loading from or dumping to XML.
+  Fixed values (e.g. fvar axes and instance coordinates, avar mappings, etc.) are
+  now stored as un-rounded decimal floats upon decompiling (#1740, #737).
+- [feaLib] Fixed handling of multiple ``LigatureCaret`` statements for the same glyph.
+  Only the first rule per glyph is used, additional ones are ignored (#1733).
+
+4.0.2 (released 2019-09-26)
+---------------------------
+
+- [voltLib] Added support for ``ALL`` and ``NONE`` in ``PROCESS_MARKS`` (#1732).
+- [Silf] Fixed issue in ``Silf`` table compilation and decompilation regarding str vs
+  bytes in Python 3 (#1728).
+- [merge] Handle duplicate glyph names better: instead of appending the font index to
+  all glyph names, use code similar to what we use in the ``post`` and ``CFF`` tables
+  (#1729).
+
+4.0.1 (released 2019-09-11)
+---------------------------
+
+- [otTables] Support fixing offset overflows in ``MultipleSubst`` lookup subtables
+  (#1706).
+- [subset] Prune empty strikes in ``EBDT`` and ``CBDT`` table data (#1698, #1633).
+- [pens] Fixed issue in ``PointToSegmentPen`` when the last point of a closed contour
+  has the same coordinates as the starting point and was incorrectly dropped (#1720).
+- [Graphite] Fixed ``Sill`` table output to pass OTS (#1705).
+- [name] Added ``removeNames`` method to the ``table__n_a_m_e`` class (#1719).
+- [ttLib] Added aliases for the renamed entries ``ascender`` and ``descender`` in the
+  ``hhea`` table (#1715).
+
+4.0.0 (released 2019-08-22)
+---------------------------
+
+- NOTE: The v4.x version series only supports Python 3.6 or greater. You can keep
+  using fonttools 3.x if you need support for Python 2.
+- [py23] Removed all the python2-only code since it is no longer reachable, thus
+  unused; only the Python3 symbols were kept, but these are no-ops. The module is now
+  DEPRECATED and will be removed in the future.
+- [ttLib] Fixed UnboundLocalError for empty loca/glyf tables (#1680). Also, allow
+  the glyf table to be incomplete when dumping to XML (#1681).
+- [varLib.models] Fixed KeyError while sorting masters when there are no on-axis
+  masters for a given axis (38a8eb0e).
+- [cffLib] Make sure glyph names are unique (#1699).
+- [feaLib] Fix feature parser to correctly handle octal numbers (#1700).
+
+\... see the fontTools repository for earlier changes
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/RECORD b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..4c8c4e7463d6b40bd44cace41cd1a8f93939267a
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/RECORD
@@ -0,0 +1,352 @@
+../../../bin/fonttools,sha256=EeGPCL9DR8H0PH96R8wJ8Nk0Ks7h74doe1zo97rTNuY,371
+../../../bin/pyftmerge,sha256=ERZx54E9kRjFr3UmPtryLPOod40p8Urq62hPfpkxdmM,368
+../../../bin/pyftsubset,sha256=rW4Pd3xLgxdRr4-kUrfMVAIosGL_KovKgilFtzRF9XY,369
+../../../bin/ttx,sha256=vK_TNkrwsDlBh4EsdWC0RxgxRCc5KNER2WeSGXbCEes,366
+../../../share/man/man1/ttx.1,sha256=cLbm_pOOj1C76T2QXvDxzwDj9gk-GTd5RztvTMsouFw,5377
+fontTools/__init__.py,sha256=fgEzyisKw1rSSjT7FZcZnqK0rK2jqxT-G6ctz4mAi9U,183
+fontTools/__main__.py,sha256=VjkGh1UD-i1zTDA1dXo1uecSs6PxHdGQ5vlCk_mCCYs,925
+fontTools/afmLib.py,sha256=1MagIItOzRV4vV5kKPxeDZbPJsfxLB3wdHLFkQvl0uk,13164
+fontTools/agl.py,sha256=05bm8Uq45uVWW8nPbP6xbNgmFyxQr8sWhYAiP0VSjnI,112975
+fontTools/cffLib/CFF2ToCFF.py,sha256=pvwh6qxJ0D7c4xgXBcyAdmZGzpTiywMy45-jjp7dKck,6088
+fontTools/cffLib/CFFToCFF2.py,sha256=Qnk7lYlsTRHnlZQ6NXNdr_f4MJwZQ21kcS08KFbsyY8,10119
+fontTools/cffLib/__init__.py,sha256=62vpcR7u8cE407kXduAwnFttHnsoCpDQ7IBK-qOYFQ8,107886
+fontTools/cffLib/specializer.py,sha256=vsOPkR_jHNe6tESQEjmm0i76y7sWI5MKo3bsTmI3sNM,32609
+fontTools/cffLib/transforms.py,sha256=kHBnYQmcJBLIMUC6Uws4eor2mJiNNHiR_eRePXHDPC8,17371
+fontTools/cffLib/width.py,sha256=IqGL0CLyCZqi_hvsHySG08qpYxS3kaqW-tsAT-bjHV4,6074
+fontTools/colorLib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+fontTools/colorLib/builder.py,sha256=kmO7OuudQQb3fEOS7aLzgTDVjqS9i2xIQmk9p1uBe8A,23008
+fontTools/colorLib/errors.py,sha256=CsaviiRxxrpgVX4blm7KCyK8553ljwL44xkJOeC5U7U,41
+fontTools/colorLib/geometry.py,sha256=3ScySrR2YDJa7d5K5_xM5Yt1-3NCV-ry8ikYA5VwVbI,5518
+fontTools/colorLib/table_builder.py,sha256=ZeltWY6n-YPiJv_hQ1iBXoEFAG70EKxZyScgsMKUFGU,7469
+fontTools/colorLib/unbuilder.py,sha256=iW-E5I39WsV82K3NgCO4Cjzwm1WqzGrtypHt8epwbHM,2142
+fontTools/config/__init__.py,sha256=JICOHIz06KuHCiBmrxj-ga19P6ZTuLXh0lHmPh-Ra1w,3154 +fontTools/cu2qu/__init__.py,sha256=Cuc7Uglb0nSgaraTxXY5J8bReznH5wApW0uakN7MycY,618 +fontTools/cu2qu/__main__.py,sha256=kTUI-jczsHeelULLlory74QEeFjZWp9zigCc7PrdVQY,92 +fontTools/cu2qu/benchmark.py,sha256=wasPJmf8q9k9UHjpHChC3WQAGbBAyHN9PvJzXvWC0Fw,1296 +fontTools/cu2qu/cli.py,sha256=MbAQnOpZwrUFe_tjAP3Tgf6uLdOgHlONUcPNeTXwH0Y,6076 +fontTools/cu2qu/cu2qu.c,sha256=5MqBt8fFiiaXVyfUVZbAkI1P1CP9g6ZSyk7LbKzmx78,629339 +fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so,sha256=h_-2nNpp4TjpN50hFZnvIb0PFC_PQA54v7GHylEND7k,1024424 +fontTools/cu2qu/cu2qu.py,sha256=GGNdNWT4xgrvxeZTczIj9Nxi6vN-5lb90KyVmB95W0Y,16439 +fontTools/cu2qu/errors.py,sha256=PyJNMy8lHDtKpfFkc0nkM8F4jNLZAC4lPQCN1Km4bpg,2441 +fontTools/cu2qu/ufo.py,sha256=qZR70uWdCia19Ff8GLn5NeItscvvn69DegjDZVF4eNI,11794 +fontTools/designspaceLib/__init__.py,sha256=NGIC5zaq0NDdSkOyl6-i327cAzgCS3jeayEDvMEXRwY,129263 +fontTools/designspaceLib/__main__.py,sha256=xhtYXo1T1tsykhQDD0tcconSNYgWL5hoTBORpVDUYrc,103 +fontTools/designspaceLib/split.py,sha256=FB1NuvhUO453UXveQZi9oyrW_caoCPM3RADp1rYWkDs,19239 +fontTools/designspaceLib/statNames.py,sha256=gXGKWVr1ju2_oL-R_DkyoZ3GlI7mfLORovpk1Ebgmvc,9237 +fontTools/designspaceLib/types.py,sha256=ofK65qXNADqcpl7zI72Pa5s07-cm7G41iEmLVV44-Es,5320 +fontTools/encodings/MacRoman.py,sha256=4vEooUDm2gLCG8KIIDhRxm5-A64w7XrhP9cjDRr2Eo0,3576 +fontTools/encodings/StandardEncoding.py,sha256=Eo3AGE8FE_p-IVYYuV097KouSsF3UrXoRRN0XyvYbrs,3581 +fontTools/encodings/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75 +fontTools/encodings/codecs.py,sha256=u50ruwz9fcRsrUrRGpR17Cr55Ovn1fvCHCKrElVumDE,4721 +fontTools/feaLib/__init__.py,sha256=jlIru2ghxvb1HhC5Je2BCXjFJmFQlYKpruorPoz3BvQ,213 +fontTools/feaLib/__main__.py,sha256=Df2PA6LXwna98lSXiL7R4as_ZEdWCIk3egSM5w7GpvM,2240 +fontTools/feaLib/ast.py,sha256=48Y6vpSD_wYfucWyh_bQtzf2AQFX-pOwBvsxdcpVDz0,74158 +fontTools/feaLib/builder.py,sha256=f8bet-VuwM6aIBIUPgKUQ01chyTSaWOzn152zq4Y1rY,73165 +fontTools/feaLib/error.py,sha256=Bz_5tNcNVcY7_nrAmFlQNhQldtqZWd8WUGQ2E3PWhZo,648 +fontTools/feaLib/lexer.c,sha256=yVnAVeP6s8MEmqVhbu3aZHdgJZxF-AQyzNssVS-09rY,753326 +fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so,sha256=GVewKKuK-Fasd0UH8NxHVa4uWc-SQ_UTrZBnhYPOVgw,1414432 +fontTools/feaLib/lexer.py,sha256=emyMPmRoqNZkzxnJyI6JRCCtXrbCOFofwa9O6ABGLiw,11121 +fontTools/feaLib/location.py,sha256=JXzHqGV56EHdcq823AwA5oaK05hf_1ySWpScbo3zGC0,234 +fontTools/feaLib/lookupDebugInfo.py,sha256=gVRr5-APWfT_a5-25hRuawSVX8fEvXVsOSLWkH91T2w,304 +fontTools/feaLib/parser.py,sha256=YDy5ySnQaNXB-q-fSpcIAbBRzQO9jJH8t2Y7jt3vpuI,99331 +fontTools/feaLib/variableScalar.py,sha256=Xu8tpDlQbfIfjnKnYDEf43EqVdyIJUy8_1ROVPg9_mg,4069 +fontTools/fontBuilder.py,sha256=yF2-IYl_hao-Zy_FWSI4R-HnlFpzFrz0YBGQO8zfaOs,34130 +fontTools/help.py,sha256=bAjatvIhV7TJyXI7WhsxdYO4YVlhScZXu_kRtHANEPo,1125 +fontTools/merge/__init__.py,sha256=8i6ownyQTAOBKWnTEHvvCYFw64Mv7Z1HPBgJI-ZiuKo,8256 +fontTools/merge/__main__.py,sha256=hDx3gfbUBO83AJKumSEhiV-xqNTJNNgK2uFjazOGTmw,94 +fontTools/merge/base.py,sha256=l0G1Px98E9ZdVuFLMUBKWdtr7Jb8JX8vxcjeaDUUnzY,2389 +fontTools/merge/cmap.py,sha256=HpthxVH5lA7VegJ8yHoBjd9vrFBV7UB5OknKGYpxWY8,6728 +fontTools/merge/layout.py,sha256=fkMPGPLxEdxohS3scVM4W7LmNthSz-UPyocsffe2KqE,16075 +fontTools/merge/options.py,sha256=xko_1-WErcNQkirECzIOOYxSJR_bRtdQYQYOtmgccYI,2501 +fontTools/merge/tables.py,sha256=7SzXYL04awDEDhvU2-9T_8A2gAjvgGyYAHUICUJOpZg,10958 
+fontTools/merge/unicode.py,sha256=kb1Jrfuoq1KUcVhhSKnflAED_wMZxXDjVwB-CI9k05Y,4273 +fontTools/merge/util.py,sha256=BH3bZWNFy-Tsj1cth7aSpGVJ18YXKXqDakPn6Wzku6U,3378 +fontTools/misc/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75 +fontTools/misc/arrayTools.py,sha256=jZk__GE-K9VViZE_H-LPPj0smWbKng-yfPE8BfGp8HI,11483 +fontTools/misc/bezierTools.c,sha256=bvcuD_CKMVNW8nJ7nv5AQQrGrhv72MCeSDR_GQBrF1Y,1820774 +fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so,sha256=C7oZ_dcR4C-8Kddy4xPvURk5pWpc7p-I0ZLgI8bXliQ,4714576 +fontTools/misc/bezierTools.py,sha256=OmR3pzCGExNvZyTPrByH7gQHpAJsYOl1cmvfYQIVfQA,45038 +fontTools/misc/classifyTools.py,sha256=zcg3EM4GOerBW9c063ljaLllgeeZ772EpFZjp9CdgLI,5613 +fontTools/misc/cliTools.py,sha256=qCznJMLCQu3ZHQD_4ctUnr3TkfAUdkGl-UuxZUrppy0,1862 +fontTools/misc/configTools.py,sha256=YXBE_vL2dMWCnK4oY3vtU15B79q82DtKp7h7XRqJc1Q,11188 +fontTools/misc/cython.py,sha256=eyLcL2Bw-SSToYro8f44dkkYRlQfiFbhcza0afS-qHE,682 +fontTools/misc/dictTools.py,sha256=VxjarsGJuk_wa3z29FSCtKZNCFfXtMBiNEu0RPAlpDk,2417 +fontTools/misc/eexec.py,sha256=GNn2OCRvO1HbbIeDPxk9i0glO7cux_AQaoVMXhBR8y8,3331 +fontTools/misc/encodingTools.py,sha256=hCv5PFfnXQJVCZA8Wyn1vr3vzLBbUuEPtGk5CzWM9RY,2073 +fontTools/misc/etree.py,sha256=ZzJc6TvAS579deAgZLVDvTY_HeTm-ZsKJ5s3LYhZSSY,16304 +fontTools/misc/filenames.py,sha256=MMCO3xjk1pcDc-baobcKd8IdoFPt-bcGqu8t8HUGAkI,8223 +fontTools/misc/filesystem/__init__.py,sha256=iwoOj6DpXKk8q-NRRHqOfRxFF6lcXIhsIA46j-cZswU,2011 +fontTools/misc/filesystem/_base.py,sha256=p74O7xREadfPQgzPJ9mP3ehu0ZHDgsmXlpsL9CnTRso,4010 +fontTools/misc/filesystem/_copy.py,sha256=ifMSs-A_bz1Aa4tIQrlUd9HtdJQ5fp5M3B6mbYuDtXI,1361 +fontTools/misc/filesystem/_errors.py,sha256=-YziRB1BT1I80ypmufvCR-M_4XoerCHyBVqX-cRnIzU,641 +fontTools/misc/filesystem/_info.py,sha256=pbV7bDTJ5F8ms6alK34J0FZYWzmRO7FT0NM3yRA3czo,2013 +fontTools/misc/filesystem/_osfs.py,sha256=RkKCE2IxcRaj7gyqFW10LhEm_-VJYtXxsS5s0DCXihM,5737 +fontTools/misc/filesystem/_path.py,sha256=frP6ZLmMeP9E3NiwoCbbgBPWpQbLBRh7T-0vOE-EuPo,1745 +fontTools/misc/filesystem/_subfs.py,sha256=vRotQwyLVfINbR88xIBQUbq9j4Kmg1_mJvEhnpvK_t4,3028 +fontTools/misc/filesystem/_tempfs.py,sha256=9FUXdCBTwFtZMFx8ghYuZVYoQdDb0tDB-jXNu3D-Qy0,924 +fontTools/misc/filesystem/_tools.py,sha256=r75dpadp7C9EdQ6r7pJQKZlCZDUJzVq2ikb_LXN-wCI,972 +fontTools/misc/filesystem/_walk.py,sha256=KMQ-GavWYr4SsA5V8ohLPmz3boilvY2P0JKrLoxW6NU,1655 +fontTools/misc/filesystem/_zipfs.py,sha256=i3qolbkDRntB_oL3v79KuEgfVlVojecPBnBA0X04PWc,6301 +fontTools/misc/fixedTools.py,sha256=gsotTCOJLyMis13M4_jQJ8-QPob2Gl2TtNJhW6FER1I,7647 +fontTools/misc/intTools.py,sha256=l6pjk4UYlXcyLtfC0DdOC5RL6UJ8ihRR0zRiYow5xA8,586 +fontTools/misc/iterTools.py,sha256=17H6LPZszp32bTKoNorp6uZF1PKj47BAbe5QG8irUjo,390 +fontTools/misc/lazyTools.py,sha256=BC6MmF-OzJ3GrBD8TYDZ-VCSN4UOx0pN0r3oF4GSoiw,1020 +fontTools/misc/loggingTools.py,sha256=NOYROsLK5TzONK5967OGdVonNyXC6kP_CmPr7M2PW_c,19933 +fontTools/misc/macCreatorType.py,sha256=Je9jtqUr7EPbpH3QxlVl3pizoQ-1AOPMBIctHIMTM3k,1593 +fontTools/misc/macRes.py,sha256=GT_pnfPw2NCvvOF86nHLAnOtZ6SMHqEuLntaplXzvHM,8579 +fontTools/misc/plistlib/__init__.py,sha256=1HfhHPt3As6u2eRSlFfl6XdnXv_ypQImeQdWIw6wK7Y,21113 +fontTools/misc/plistlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fontTools/misc/psCharStrings.py,sha256=Tb5-k_5krP0eu7qD054iGxE4Zybk9oB4jdiKzcsV0rw,43036 +fontTools/misc/psLib.py,sha256=ioIPm5x3MHkBXF2vzNkC4iVZYobrkWcyvFhmYsjOrPY,12099 +fontTools/misc/psOperators.py,sha256=9SLl5PPBulLo0Xxg_dqlJMitNIBdiGKdkXhOWsNSYZE,15700 
+fontTools/misc/py23.py,sha256=aPVCEUz_deggwLBCeTSsccX6QgJavZqvdVtuhpzrPvA,2238 +fontTools/misc/roundTools.py,sha256=1RSXZ0gyi1qW42tz6WSBMJD1FlPdtgqKfWixVN9bd78,3173 +fontTools/misc/sstruct.py,sha256=vUODd2CKvHLtjr7yn1K94Hui_yxOPWKmlgAmBMm3KDQ,7009 +fontTools/misc/symfont.py,sha256=x5ZwqK9Ik9orG6qSftgVGygBFE1wTSngrMK2We1Z5AM,6977 +fontTools/misc/testTools.py,sha256=3vj_KllUQVEiVFbS0SzTmeuKv44-L-disI1dZ4XhOfw,7052 +fontTools/misc/textTools.py,sha256=pbhr6LVhm3J-0Z4saYnJfxBDzyoiw4BR9pAgwypiOw8,3377 +fontTools/misc/timeTools.py,sha256=e9h5pgzL04tBDXmCv_8eRGB4boFV8GKXlS6dq3ggEpw,2234 +fontTools/misc/transform.py,sha256=OR8dPsAw87z77gkZQMq00iUkDWLIxYv-12XiKH1-erk,15798 +fontTools/misc/treeTools.py,sha256=tLWkwyDHeZUPVOGNnJeD4Pn7x2bQeZetwJKaEAW2J2M,1269 +fontTools/misc/vector.py,sha256=6lqZcDjAgHJFQgjzD-ULQ_PrigAMfeZKaBZmAfcC0ig,4062 +fontTools/misc/visitor.py,sha256=c9YQs14bXfL1SmqwoAhEGCvXlmyZIKR5_Vr4n7iWaRs,5610 +fontTools/misc/xmlReader.py,sha256=igut4_d13RT4WarliqVvuuPybO1uSXVeoBOeW4j0_e4,6580 +fontTools/misc/xmlWriter.py,sha256=CrNXQfNJRdt5CHKAZ4-qy3h1yJeL63ot17kXkNbeI_E,6829 +fontTools/mtiLib/__init__.py,sha256=EzYwNaENLf906h1THBeq6nSRHUKpOAYxuzO9x9PHzh8,46602 +fontTools/mtiLib/__main__.py,sha256=gd8X89jnZOe-752k7uaR1lWoiju-2zIT5Yx35Kl0Xek,94 +fontTools/otlLib/__init__.py,sha256=D2leUW-3gsUTOFcJYGC18edBYjIJ804ut4qitJYWsaQ,45 +fontTools/otlLib/builder.py,sha256=MHuQKul3Nl3vzXmY7Wb6x54ZNbJGUzRUD7-VKpOG7lE,128727 +fontTools/otlLib/error.py,sha256=cthuhBuOwZYpkTLi5gFPupUxkXkCHe-L_YgkE7N1wCI,335 +fontTools/otlLib/maxContextCalc.py,sha256=3es4Kt84TaZ49sA2ev1zrlwPJikJCAECx5KavwhyB-I,3175 +fontTools/otlLib/optimize/__init__.py,sha256=UUQRpNkHU2RczCRt-Gz7sEiYE9AQq9BHLXZEOyvsnX4,1530 +fontTools/otlLib/optimize/__main__.py,sha256=BvP472kA9KxBb9RMyyehPNevAfpmgW9MfdazkUiAO3M,104 +fontTools/otlLib/optimize/gpos.py,sha256=htOgSP743DZDUKF3eWAeJ-kdqNYOnpGXdlV-rbEXJ1A,17668 +fontTools/pens/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75 +fontTools/pens/areaPen.py,sha256=Y1WkmqzcC4z_bpGAR0IZUKrtHFtxKUQBmr5-64_zCOk,1472 +fontTools/pens/basePen.py,sha256=eIGSKrKm6w4LLHuG6XJoQZ3eObtoKV5P6aF4gT4sk7U,17073 +fontTools/pens/boundsPen.py,sha256=wE3owOQA8DfhH-zBGC3lJvnVwp-oyIt0KZrEqXbmS9I,3129 +fontTools/pens/cairoPen.py,sha256=wuuOJ1qQDSt_K3zscM2nukRyHZTZMwMzzCXCirfq_qQ,592 +fontTools/pens/cocoaPen.py,sha256=IJRQcAxRuVOTQ90bB_Bgjnmz7px_ST5uLF9CW-Y0KPY,612 +fontTools/pens/cu2quPen.py,sha256=gMUwFUsm_-WzBlDjTMQiNnEuI2heomGeOJBX81zYXPo,13007 +fontTools/pens/explicitClosingLinePen.py,sha256=kKKtdZiwaf8Cj4_ytrIDdGB2GMpPPDXm5Nwbw5WDgwU,3219 +fontTools/pens/filterPen.py,sha256=kKSvLmWCW4MkCF0ciJhjTj-LdUGOQL593PFkpm5PhP8,7790 +fontTools/pens/freetypePen.py,sha256=HD-gXJSbgImJdBc8sIBk0HWBdjv3WKFofs6PgCCsGOY,19908 +fontTools/pens/hashPointPen.py,sha256=gElrFyQoOQp3ZbpKHRWPwC61A9OgT2Js8crVUD8BQAY,3573 +fontTools/pens/momentsPen.c,sha256=LkH6tRXOFN-2qH-WheVoA4-F8-mbjMMoJrdqbQeNpsQ,564887 +fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so,sha256=BjXJaC20P_GT37ZoYYb2eiVJxzMfTVgwPwzqi0sHIZk,916288 +fontTools/pens/momentsPen.py,sha256=kjLVXhGe55Abl__Yr1gob0bl0dHe7fPSwyr7TRJnbug,25658 +fontTools/pens/perimeterPen.py,sha256=lr6NzrIWxi4TXBJPbcJsKzqABWfQeil2Bgm9BgUD3N4,2153 +fontTools/pens/pointInsidePen.py,sha256=noEUvBQIeAheDMJwzvvfnEiKhmwbS1i0RQE9jik6Gl4,6355 +fontTools/pens/pointPen.py,sha256=kc-jhMC-UgXZzKz7r6UL36kosYAQQEJ0xjhm3SaX59Y,22730 +fontTools/pens/qtPen.py,sha256=QRNLIry2rQl4E_7ct2tu10-qLHneQp0XV7FfaZ-tcL8,634 +fontTools/pens/qu2cuPen.py,sha256=pRST43-rUpzlOP83Z_Rr0IvIQBCx6RWI6nnNaitQcLk,3985 
+fontTools/pens/quartzPen.py,sha256=EH482Kz_xsqYhVRovv6N_T1CXaSvOzUKPLxTaN956tU,1287 +fontTools/pens/recordingPen.py,sha256=VgFZ4NMhnZt1qSTzFEU0cma-gw3kBe47bfSxPYH73rs,12489 +fontTools/pens/reportLabPen.py,sha256=kpfMfOLXt2vOQ5smPsU82ft80FpCPWJzQLl7ENOH8Ew,2066 +fontTools/pens/reverseContourPen.py,sha256=oz64ZRhLAvT7DYMAwGKoLzZXQK8l81jRiYnTZkW6a-Y,4022 +fontTools/pens/roundingPen.py,sha256=vh_FjikRd82-S4I8glgGMGEuGrj5IkCjRT_wmZ8jfqY,4620 +fontTools/pens/statisticsPen.py,sha256=piWK6NjjWqk9MLROjeE2-4EsxVYMyNU7UQFGD_trE9g,9808 +fontTools/pens/svgPathPen.py,sha256=T3b6SZS9B9sVWMK9mSFDtjHeviQs_yOJOZKq5Sg5Zdg,8572 +fontTools/pens/t2CharStringPen.py,sha256=GgGklb5XsCer0w37ujgRLRXx-EuzdFsyCYuzCx4n-Qs,2931 +fontTools/pens/teePen.py,sha256=P1ARJOCMJ6MxK-PB1yZ-ips3CUfnadWYnQ_do6VIasQ,1290 +fontTools/pens/transformPen.py,sha256=s0kUyQdnemUwHvYr2SFboFmh4WY1S9OHBL8L4PJKRwE,4056 +fontTools/pens/ttGlyphPen.py,sha256=yLtB-E5pTQR59OKVYySttWBu1xC2vR8ezSaRhIMtVwg,11870 +fontTools/pens/wxPen.py,sha256=W9RRHlBWHp-CVC4Exvk3ytBmRaB4-LgJPP5Bv7o9BA0,680 +fontTools/qu2cu/__init__.py,sha256=Jfm1JljXbt91w4gyvZn6jzEmVnhRx50sh2fDongrOsE,618 +fontTools/qu2cu/__main__.py,sha256=9FWf6SIZaRaC8SiL0LhjAWC2yIdY9N_9wlRko8m1l2Q,93 +fontTools/qu2cu/benchmark.py,sha256=GMcr_4r7L6K9SmJ13itt-_XKhnKqSVUDPlXUG6IZmmM,1400 +fontTools/qu2cu/cli.py,sha256=U2rooYnVVEalGRAWGFHk-Kp6Okys8wtzdaWLjw1bngY,3714 +fontTools/qu2cu/qu2cu.c,sha256=aAgEd2kICdXMOPZyPWUp6FNP4MekqCJ0A_DO2lzh7yA,689362 +fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so,sha256=Lm5UYlhIShjqcp7oT1WRFD5rN255xxFFTi3p0jrbSw8,1120912 +fontTools/qu2cu/qu2cu.py,sha256=IYtpkwHdfKOXJr65Y_pJ9Lrt_MgJaISAKGMAs5ilFSM,12288 +fontTools/subset/__init__.py,sha256=uOof18oO8Bwl3BDoRqBIG1VvEpY9qSkG0TRmBWLKPFk,137707 +fontTools/subset/__main__.py,sha256=bhtfP2SqP4k799pxtksFgnC-XGNQDr3LcO4lc8T5e5g,95 +fontTools/subset/cff.py,sha256=rqMRJOlX5FacV1LW8aDlVOglgEM87TkMA9bdsYenask,6145 +fontTools/subset/svg.py,sha256=8dLBzQlnIt4_fOKEFDAVlKTucdHvcbCcyG9-a6UBZZ0,9384 +fontTools/subset/util.py,sha256=9SXFYb5Ef9Z58uXmYPCQil8B2i3Q7aFB_1fFDFSppdU,754 +fontTools/svgLib/__init__.py,sha256=IGCLwSbU8jLhq6HI2vSdPQgNs6zDUi5774TgX5MCXPY,75 +fontTools/svgLib/path/__init__.py,sha256=C82fh7xH6ZHsSFVnV848-xeDezpokx1EwTmayJCouFU,1996 +fontTools/svgLib/path/arc.py,sha256=-f5Ym6q4tDWQ76sMNSTUTWgL_7AfgXojvBhtBS7bWwQ,5812 +fontTools/svgLib/path/parser.py,sha256=8T6okMstvgM9ufb2zBcwSzsuuoYbqfnUjNYgb6kjznU,10788 +fontTools/svgLib/path/shapes.py,sha256=xvBUIckKyT9JLy7q_ZP50r6TjvZANyHdZP7wFDzErcI,5322 +fontTools/t1Lib/__init__.py,sha256=p42y70wEIbuX0IIxZG7-b_I-gHto1VLy0gLsDvxCfkw,20865 +fontTools/tfmLib.py,sha256=UMbkM73JXRJVS9t2B-BJc13rSjImaWBuzCoehLwHFhs,14270 +fontTools/ttLib/__init__.py,sha256=1k7qp9z04gA3m6GvxDaINjqrKbzOkdTA_4RnqW_-LrA,661 +fontTools/ttLib/__main__.py,sha256=lHMPWsnzjKPuMFavf6i1gpk9KexiAk4qzgDd50Mbby0,4733 +fontTools/ttLib/macUtils.py,sha256=lj3oeFpyjV7ko_JqnluneITmAtlc119J-vwTTg2s73A,1737 +fontTools/ttLib/removeOverlaps.py,sha256=YBtj1PX-d2jMgCiWGuI6ibghWApUWqH2trJGXNxrbjQ,12612 +fontTools/ttLib/reorderGlyphs.py,sha256=TbxLxqPTUGiKRX3ulGFCwVm2lEisFYlX6caONJr_4oY,10371 +fontTools/ttLib/scaleUpem.py,sha256=U_-NGkwfS9GRIackdEXjGYZ-wSomcUPXQahDneLeArI,14618 +fontTools/ttLib/sfnt.py,sha256=wemkfz93dlAoyo-VVzmg5OLoSSyMFQNzBTTP3Kem-xk,22792 +fontTools/ttLib/standardGlyphOrder.py,sha256=7AY_fVWdtwZ4iv5uWdyKAUcbEQiSDt1lN4sqx9xXwE0,5785 +fontTools/ttLib/tables/B_A_S_E_.py,sha256=H71A9pJ850mvjbrWHqy8iFI2Dxg7102YRtAkfdCooig,369 
+fontTools/ttLib/tables/BitmapGlyphMetrics.py,sha256=9gcGPVzsxEYnVBO7YLWfeOuht9PaCl09GmbAqDYqKi0,1769 +fontTools/ttLib/tables/C_B_D_T_.py,sha256=5LNdc8FMir1kC5fvp5iHwWfeuE-RuqdxAArFXaqPjQ0,3646 +fontTools/ttLib/tables/C_B_L_C_.py,sha256=YXlwovoCHYx8THLQD9iBU_VGoaB9LFObEKtL6ddD320,520 +fontTools/ttLib/tables/C_F_F_.py,sha256=yg3mUtYBudgmpG7Bz475j_DNnCelsgrTsM8DH1uR4ek,1978 +fontTools/ttLib/tables/C_F_F__2.py,sha256=YoHfJQdF-ezx4OwRQ2Y2O7rRJEPjOkf3Hx5Y11Xq0AM,807 +fontTools/ttLib/tables/C_O_L_R_.py,sha256=SHwFVNVmoUQR2e87KuTSe-J9LfeegS4f2hEpee29_2o,5993 +fontTools/ttLib/tables/C_P_A_L_.py,sha256=odFjqM4GnjXyQYGEC-e0Gvqms1jQ5zHHG3SDg7y-BI0,11942 +fontTools/ttLib/tables/D_S_I_G_.py,sha256=AgQPM9Cdro1P-ehJjTfsC9mRTTtSc16At0nnpb1XOGI,5517 +fontTools/ttLib/tables/D__e_b_g.py,sha256=KDnfkNOUnm3F13wD_j3YNBOvYadZ40Gf_0170hFkJp0,1134 +fontTools/ttLib/tables/DefaultTable.py,sha256=cOtgkLWPY9qmOH2BSPt4c4IUSdANWTKx2rK1CTxQ4h0,1487 +fontTools/ttLib/tables/E_B_D_T_.py,sha256=uOpmt25gOJQeO1u1IGAyPWgVmh-4vSZqrQEHvOYwbwg,32534 +fontTools/ttLib/tables/E_B_L_C_.py,sha256=LfEVzBg_yWr9dhChzS0U2G-7wNOwzwB0LWoXIUYNKKM,30054 +fontTools/ttLib/tables/F_F_T_M_.py,sha256=_450vdbEH7Y-0_rOwb3Q0hg-Qq2W8C_sHljy7rZtqqM,1683 +fontTools/ttLib/tables/F__e_a_t.py,sha256=ct79Gf__5ALlqfSBn6wvw6fazb31Od71R6vIp6o9XF4,5483 +fontTools/ttLib/tables/G_D_E_F_.py,sha256=QXiILFCRnPNZcwpub6ojN5S9WP6y56LsXi25pUWLgp4,299 +fontTools/ttLib/tables/G_M_A_P_.py,sha256=fvIQumokOCLa8DFeq_xi069F9RROsXSVmDvWtxgyacQ,4720 +fontTools/ttLib/tables/G_P_K_G_.py,sha256=Xi4Hj2OxZ2IZgVyBQ-Qyiie0hPZjpXZkrao-E5EdTWM,4646 +fontTools/ttLib/tables/G_P_O_S_.py,sha256=UkP3mlnyvQg-jj6ZBOh6j-OieVg_goJQ31nlLvoLGSI,397 +fontTools/ttLib/tables/G_S_U_B_.py,sha256=cwFMKO-pgwsn1H8Q9Jb58Z6ZrBrCoN0sqJB0YunBfSk,294 +fontTools/ttLib/tables/G_V_A_R_.py,sha256=13oO2dD-L4yfkrBuR-KN2rc40wh5lLIlx_khwMz5GH4,94 +fontTools/ttLib/tables/G__l_a_t.py,sha256=Xh3IzFgYlvNjrAOn7Ja73DrWrQTJgJxmDFSUKS6yHdM,8645 +fontTools/ttLib/tables/G__l_o_c.py,sha256=5DsxGzaG7HyJVvLlKQeff1lXt-XPWaHNNaf-EYwsKh4,2685 +fontTools/ttLib/tables/H_V_A_R_.py,sha256=6kPLDUGT8EussA3y9gKr_mrgY5PNv7YaK1V0keMXD9w,313 +fontTools/ttLib/tables/J_S_T_F_.py,sha256=Q9TEf3OuyDIxZlmoz9a3c-mDMlJK6YBQ9KcYmiwFRbU,315 +fontTools/ttLib/tables/L_T_S_H_.py,sha256=Iu6syJFuhJj0_7Aan2NPlDuQDIq-AzLwsOQbXVTnlL0,2189 +fontTools/ttLib/tables/M_A_T_H_.py,sha256=-TVu9Nlcs-1shkElbIk-CWtUwXUMdycHFkjvPE8C_fs,342 +fontTools/ttLib/tables/M_E_T_A_.py,sha256=sA6ookcjchw8UYVEuS8QEXc62I9_Rms9cu_jKA6MkNI,11989 +fontTools/ttLib/tables/M_V_A_R_.py,sha256=67cEuiTw5y5W1Zk98L_S_SmJINIfy_mzWCkyHcujz94,308 +fontTools/ttLib/tables/O_S_2f_2.py,sha256=1Pq2Xu4oYOJePTHC_hTKg3RIfKely3j6T1u_lMTEpD8,28030 +fontTools/ttLib/tables/S_I_N_G_.py,sha256=CFDy8R2fDeYn7ocfrZr7Ui7U9D0h4G55CdPfY55g-Bk,3317 +fontTools/ttLib/tables/S_T_A_T_.py,sha256=y9NiWCtnlZtMjw4K9_SdA84Xa-dJk7G5eb2dSe6ciWc,498 +fontTools/ttLib/tables/S_V_G_.py,sha256=vT6QTW5ArtskVUxnPEH_ZxKz4DF4v1pKbylN6DG0R3o,7676 +fontTools/ttLib/tables/S__i_l_f.py,sha256=lPQV2RdhcJRgfDzHp_dkgSxVUUdkcAnY1Bz7V18Gt9U,34985 +fontTools/ttLib/tables/S__i_l_l.py,sha256=Vjtn7SI83vaLGIuQf2e-jhZSFOXb9vXB4jwqznjqnMc,3224 +fontTools/ttLib/tables/T_S_I_B_.py,sha256=3WhEtyNnjYumcowD0GpjubrgnS-RzouZxCxEe4yLDo8,341 +fontTools/ttLib/tables/T_S_I_C_.py,sha256=hAV9Hq_ALsWaducDpw1tDRREvFL7hx7onnUF0sXTelU,381 +fontTools/ttLib/tables/T_S_I_D_.py,sha256=TsdX-G2xxVQO9sSE1wE_xDRx-gor5YiXTHeUthMwCPY,341 +fontTools/ttLib/tables/T_S_I_J_.py,sha256=x8Tlvi6aTxoQcI12UL7muoWF1Q61iBDseAS1mRdOYrg,341 
+fontTools/ttLib/tables/T_S_I_P_.py,sha256=-il2ucTBOghVBY7cmleHdLZc3W3CKh7-iPPT0A3KBzk,341 +fontTools/ttLib/tables/T_S_I_S_.py,sha256=tVBnl63vyZUIq93oM6dEjHCXvPn9vt5vvL3jG59b0Lg,341 +fontTools/ttLib/tables/T_S_I_V_.py,sha256=iUWxz2MSrtw7mzuQZj30QAJrCPnyJ4GincFfySFUNAg,855 +fontTools/ttLib/tables/T_S_I__0.py,sha256=O-2oI0eBgt4mP15-UwH0_0r7YWi3EEEhG-4etqDueGI,2505 +fontTools/ttLib/tables/T_S_I__1.py,sha256=nSUhni-fvYmeKXW4zLfP3FG_3LQU2QKPKS1_gKY5lYg,6971 +fontTools/ttLib/tables/T_S_I__2.py,sha256=q2rub-d77iWWiBM6awO0-TCl-Xq7kalPobHYC2QEOfc,496 +fontTools/ttLib/tables/T_S_I__3.py,sha256=0LcvvCzVZJzyz7i4zjIkUuYXEqXwOCs9WeCsgDFqKJ8,543 +fontTools/ttLib/tables/T_S_I__5.py,sha256=hhvJn6jiXs8kuBtun8krNUTXTljH-eKxaxXM1T-7SXM,1905 +fontTools/ttLib/tables/T_T_F_A_.py,sha256=LuT0w__AMtawnsBMobhEMW9gp2yk0mA5ZRzwF45c0UI,392 +fontTools/ttLib/tables/TupleVariation.py,sha256=4XTDTRPZWPg9_1K5SVgdNoxtgQvahtiO4LNO7fk1cK4,32235 +fontTools/ttLib/tables/V_A_R_C_.py,sha256=3jFX50J6X-Cc4dwwiztKKsDTRXVHTXlVdQH328UN1-k,289 +fontTools/ttLib/tables/V_D_M_X_.py,sha256=RbHl7vvO9pcjT_kKvcCmcByQj39n4PmVeq55wD5C14g,10437 +fontTools/ttLib/tables/V_O_R_G_.py,sha256=Cn3OxjVtcO-Uvp61P5c2336V9iEbuGr6vWAXnSIaihk,5965 +fontTools/ttLib/tables/V_V_A_R_.py,sha256=Cstw6tc_U4-EmTriRItBSpvTJODAjMFQjfyTaxLzsbI,319 +fontTools/ttLib/tables/__init__.py,sha256=eQPcuHCfRuGtt6nOa0KwV6vtUNKHnwuQyA7xSN8SPoc,2651 +fontTools/ttLib/tables/_a_n_k_r.py,sha256=MpAzIifmIi_3gx2oP6PC3R2lu36Ewsr2-W1rXjsz2Ug,483 +fontTools/ttLib/tables/_a_v_a_r.py,sha256=QnjP8oO-hozKcI21_bRC-_SzZxVqcHC2VLDyNs4aH9U,7116 +fontTools/ttLib/tables/_b_s_l_n.py,sha256=_848o7SQqztzBDfHYei-80u9ltxIHVBzXu1dYHLV57M,465 +fontTools/ttLib/tables/_c_i_d_g.py,sha256=yt8rVIadpJSDUCoVH4dZetNiy0Azm5ESAxHjB2BX_eA,913 +fontTools/ttLib/tables/_c_m_a_p.py,sha256=r8-bB_E0EQh5h4TGX5nTnDnwTUtXuRB3iuqEDoN_IOM,62202 +fontTools/ttLib/tables/_c_v_a_r.py,sha256=35ayk2kX1pcLGwyx0y4I1l-r7LHgdKv0ulVx8oBPteI,3527 +fontTools/ttLib/tables/_c_v_t.py,sha256=1_RhEcTmhWQWQp7Hsj8UsByKmXCIppZyIbIArGywEEM,1618 +fontTools/ttLib/tables/_f_e_a_t.py,sha256=Fi1XnjhkCG0tp43AcvpIaivD-YRFpufo6feGIrenQDo,469 +fontTools/ttLib/tables/_f_p_g_m.py,sha256=uZHZzqL6OdLn_Hxskv-xf3XuE4fyaSv_jbALEjwXYug,1633 +fontTools/ttLib/tables/_f_v_a_r.py,sha256=rV33H2BgHUl3Wuydsou1G-Hi4uASBppWaLj3FMmiLjs,8837 +fontTools/ttLib/tables/_g_a_s_p.py,sha256=YvhAVDvdssN2fjPMTfSrO4WBCfTuh9T2cU5zquDVnSw,2203 +fontTools/ttLib/tables/_g_c_i_d.py,sha256=AJ4uV7PTHbnsw4Tfw8c2Ezh0VMox3oAH0qhhq7y8hdM,362 +fontTools/ttLib/tables/_g_l_y_f.py,sha256=dDV65llsEDI9fKcVKC5TOiaXpXSyMHNEytuYOGt7adM,85584 +fontTools/ttLib/tables/_g_v_a_r.py,sha256=sRA_ldRw_IggqTNuyS-IKY0rybj22AeNnYGMazGVZ6Y,12075 +fontTools/ttLib/tables/_h_d_m_x.py,sha256=wMrO4D04QNT8u30p8AV-aG3bndXCq4wlPNvtbd8ip7c,4252 +fontTools/ttLib/tables/_h_e_a_d.py,sha256=yY2GTFq6Mn6nN8EegbMVJRMUWIqDYFln3FhTk3ziw6s,4926 +fontTools/ttLib/tables/_h_h_e_a.py,sha256=X4t1aF1MZMuz3phCVSFwKcNTeoZdx-042wFtHc-nK9w,4767 +fontTools/ttLib/tables/_h_m_t_x.py,sha256=C_-GIrH8rHEqEQtsGeYTc6XLtLeu6ibRl8AAQxkQng8,6042 +fontTools/ttLib/tables/_k_e_r_n.py,sha256=DQNLmD_HEdDKPfp4tamOd9W3T5a1lXFM5tDaWrKl164,10794 +fontTools/ttLib/tables/_l_c_a_r.py,sha256=8W6xFOj-sm003MCXX4bIHxs9ntfVvT0FXYllPxa3z4I,390 +fontTools/ttLib/tables/_l_o_c_a.py,sha256=yxiwLKXLZjNju5XYmLb6EhNLec1d7ezEDDe1dszceHo,2180 +fontTools/ttLib/tables/_l_t_a_g.py,sha256=9YpApjI-rZ4e3HeT8Pj-osiHl3uALD9JXg5O7pqk9L0,2552 +fontTools/ttLib/tables/_m_a_x_p.py,sha256=cIDIZWse9czwwsnlxIh3qwgwaXbt7PQAjXKAcmMDspY,5264 
+fontTools/ttLib/tables/_m_e_t_a.py,sha256=A0CZPEAVxYrpytjXUGQJCTddwG8KrvUVbtBe3A1MqgI,3913 +fontTools/ttLib/tables/_m_o_r_t.py,sha256=u35tYqn3cjzKxeCF0FUFeLtaf36mjDDSN08uuk0Kme8,487 +fontTools/ttLib/tables/_m_o_r_x.py,sha256=OwamVpIO7REDnFr95HuFPoY_0U6i9zQPb11K1sFTvDY,548 +fontTools/ttLib/tables/_n_a_m_e.py,sha256=LYySKOxLr_t5dYKeNTqA8pozf8MfI1R_jS0SCHNViq0,41063 +fontTools/ttLib/tables/_o_p_b_d.py,sha256=TNZv_2YTrj4dGzd6wA9Jb-KGZ99un177s5p3LlfxQ74,448 +fontTools/ttLib/tables/_p_o_s_t.py,sha256=hsGOLA10ZwaGSGpvQgeelnj1vj_kTUbiBnskgk2i35w,11701 +fontTools/ttLib/tables/_p_r_e_p.py,sha256=CcKr4HrswkupLmbJdrJLTM-z9XgLefQyv8467j9V0zs,427 +fontTools/ttLib/tables/_p_r_o_p.py,sha256=Eg8x5qWyXDzPezMafFu0s0qyPDHj-sPsFxGtE6h29qo,427 +fontTools/ttLib/tables/_s_b_i_x.py,sha256=tkkKbNKNYkUhZJuN0kl7q37x5KK5OovB06y28obPV6A,4865 +fontTools/ttLib/tables/_t_r_a_k.py,sha256=rrrPZLELFYA5F8PERoafIS9cb_d_i6xtpAzHEbsFHSw,11379 +fontTools/ttLib/tables/_v_h_e_a.py,sha256=FuULIBl4OQyUeLPOFEY8buB0pAnQhGa1-5a6kN9i5Sc,4459 +fontTools/ttLib/tables/_v_m_t_x.py,sha256=AUuxtyQvMWrTBNbOIaL6uKcB_DNpNb0YX28JIuTHw_Y,500 +fontTools/ttLib/tables/asciiTable.py,sha256=4c69jsAirUnDEpylf9CYBoCKTzwbmfbtUAOrtPnpHjY,637 +fontTools/ttLib/tables/grUtils.py,sha256=hcOJ5oJPOd2uJWnWA7qwR7AfL37YZ5zUT7g8o5BBV80,2270 +fontTools/ttLib/tables/otBase.py,sha256=cHdoYX-ICa8GeI-tVhFy1K9-CHaExiKO-HBjmH0OkbU,53330 +fontTools/ttLib/tables/otConverters.py,sha256=ihE_WMSKAKSaBbMvnFYDj2eMxf7PvRMMa8zGwfoYuYc,74202 +fontTools/ttLib/tables/otData.py,sha256=-XXRwdVfP-Wz7oBjMPpku0A0QH9lw_fFGNzZlt9N0mo,197262 +fontTools/ttLib/tables/otTables.py,sha256=2U04ot_2ITlBZx2QtpnIOtBGftPFs9ZX2FWfz4vz1G0,96987 +fontTools/ttLib/tables/otTraverse.py,sha256=HznEVAlVf_8eyqjsO2edgELtMlXnjnUqccK3PytvVUE,5518 +fontTools/ttLib/tables/sbixGlyph.py,sha256=tjEUPVRfx6gr5yme8UytGTtVrimKN5qmbzT1GZPjXiM,5796 +fontTools/ttLib/tables/sbixStrike.py,sha256=dL8O9K8R4S6RVQDP-PVjIPBrvbqbE9zwra0uRL0nLq0,6651 +fontTools/ttLib/tables/table_API_readme.txt,sha256=eZlRTLUkLzc_9Ot3pdfhyMb3ahU0_Iipx0vSbzOVGy8,2748 +fontTools/ttLib/tables/ttProgram.py,sha256=tgtxgd-EnOq-2PUlYEihp-6NHu_7HnE5rxeSAtmXOtU,35888 +fontTools/ttLib/ttCollection.py,sha256=aRph2MkBK3kd9-JCLqhJ1EN9pffN_lVX6WWmOTTewc8,3963 +fontTools/ttLib/ttFont.py,sha256=8I79ksIdtfMf8GV_nhawEhlzOvQMTyB98lrWzoJADh0,40669 +fontTools/ttLib/ttGlyphSet.py,sha256=cUBhMGa5hszeVqOm2KpOdeJh-LsiqE7RNdyIUPZ2vO8,17476 +fontTools/ttLib/ttVisitor.py,sha256=_tah4C42Tv6Pm9QeLNQwwVCxqI4VNEAqYCbmThp6cvY,1025 +fontTools/ttLib/woff2.py,sha256=6LPISeBQ1dubzKjWrUcYm_vgETC46BTLY4XkG52qvSA,60921 +fontTools/ttx.py,sha256=FxuGubujWCGJWSTrJEjoNH--25fVIPy-ZRtYy9H6iTk,17277 +fontTools/ufoLib/__init__.py,sha256=Q_yTPiqbbgFO50ephzOkE0AQITEu0IC24jvIv1dhxds,94339 +fontTools/ufoLib/converters.py,sha256=9j4BQ7EXVfWiVoB7OZCzRGsRJWi9G9w_27iFuiJNphw,13044 +fontTools/ufoLib/errors.py,sha256=9f8l5NaFAj3BZPa6Bbqt06FL4afffLuMzy4nPf-eOlE,845 +fontTools/ufoLib/etree.py,sha256=T3sjLTgjMAq6VyYRicWPaMIVBJ2YSuwZxV6Vc5yZtQI,231 +fontTools/ufoLib/filenames.py,sha256=o8Kuj0aOSlb8_hoLfJ2D91n9S5JZazFcTjiQYfqQ5ic,10418 +fontTools/ufoLib/glifLib.py,sha256=cUNEGB1xnJyiY4BqXkPjlkXQbB4wjjN4gYPnXpgyxmU,72699 +fontTools/ufoLib/kerning.py,sha256=QF5n7eXwBfc2TJ_XGUkLAnxcgq82vdLOpJEN2Lh_7UY,4233 +fontTools/ufoLib/plistlib.py,sha256=jzMGOGvHO6XvS-IO8hS04ur7r8-v2dnVq-vKMoJZvqQ,1510 +fontTools/ufoLib/pointPen.py,sha256=CuREcm3IYteZNBDAd_ZRAV4XqBsy0s07jdWc4en9r-8,244 +fontTools/ufoLib/utils.py,sha256=mN5GuY21fsr-9N1I3laaSm3IUhILTBlNySfM52p26gQ,1995 
+fontTools/ufoLib/validators.py,sha256=EtVfe3fNZAS7q9Yd07S0pF1x_lonFxpYoqLpjiNIwL8,30813 +fontTools/unicode.py,sha256=ZZ7OMmWvIyV1IL1k6ioTzaRAh3tUvm6gvK7QgFbOIHY,1237 +fontTools/unicodedata/Blocks.py,sha256=6BL66vrr5UWylVx3bicy6U2kO-QcNNKpmPKQTGggUEU,32415 +fontTools/unicodedata/Mirrored.py,sha256=kdhwCWOWaArmfNkDah0Thv-67M9wWz45R5IMPhqyzFM,9242 +fontTools/unicodedata/OTTags.py,sha256=wOPpbMsNcp_gdvPFeITtgVMnTN8TJSNAsVEdu_nuPXE,1196 +fontTools/unicodedata/ScriptExtensions.py,sha256=YTZr2bOteHiz_7I4108PRy0Is4kFof-32yFuoKjFHjc,28207 +fontTools/unicodedata/Scripts.py,sha256=I0nY08ovsZ4pHU5wYchjat9DjnxcpoTzaXAn5oFaKNI,130271 +fontTools/unicodedata/__init__.py,sha256=ht5cIwvgKSonvRADzijXzBp6uZjhTvFobBwaba4ogaM,9033 +fontTools/varLib/__init__.py,sha256=dc1_8itv2pS3q6mO8vMuEe-XdWOiJLIWVsJ8-bhPflI,54236 +fontTools/varLib/__main__.py,sha256=wbdYC5bPjWCxA0I4SKcLO88gl-UMtsYS8MxdW9ySTkY,95 +fontTools/varLib/avar.py,sha256=Ye_u0HHznaPQaTzufNFKDj_v9o_LxOKJoa_eTK1D1F0,9647 +fontTools/varLib/avarPlanner.py,sha256=uLMGsL6cBbEMq5YItwABG_vXlXV3bxquM93WGDJ1brA,27358 +fontTools/varLib/builder.py,sha256=mSKOCcnnw-WzmZs15FayoqCDh77Ts7o9Tre9psh8CUc,6609 +fontTools/varLib/cff.py,sha256=EVgaQcoROIrYQsRuftnxFuGGldEPYbrIh5yBckylJC4,22901 +fontTools/varLib/errors.py,sha256=dMo8eGj76I7H4hrBEiNbYrGs2J1K1SwdsUyTHpkVOrQ,6934 +fontTools/varLib/featureVars.py,sha256=ZmHPyy4KuamR4bI1PfH-Umk4EN_CfwvNfchu7BOmECg,25695 +fontTools/varLib/hvar.py,sha256=1IvL5BneTkg8jJYicH0TSQViB6D0vBEesLdlfqoLBX4,3695 +fontTools/varLib/instancer/__init__.py,sha256=UxmXi4zpOGxSOMRaWQB-2IsI_tsZIK0O_T4AqXPJXZQ,72266 +fontTools/varLib/instancer/__main__.py,sha256=zfULwcP01FhplS1IlcMgNQnLxk5RVfmOuinWjqeid-g,104 +fontTools/varLib/instancer/featureVars.py,sha256=oPqSlnHLMDTtOsmQMi6gkzLox7ymCrqlRAkvC_EJ4bc,7110 +fontTools/varLib/instancer/names.py,sha256=IPRqel_M8zVU0jl30WsfgufxUm9PBBQDQCY3VHapeHc,14950 +fontTools/varLib/instancer/solver.py,sha256=uMePwX0BVT5F94kUvDglsI4_F0nEH67F7RFuJ6tQwQ0,11002 +fontTools/varLib/interpolatable.py,sha256=Bhlq_LhEZ-sXfLNY8aFEChFrsKuT2kzmnuMfG5qi0v4,45221 +fontTools/varLib/interpolatableHelpers.py,sha256=lXd7kwfIVl-4opd-vxCDhf48RnJ7IQKv_uuFQM_6vaU,11496 +fontTools/varLib/interpolatablePlot.py,sha256=w393P6mGLRhYkIjSxMww3qyoYxAUZzCXlmPBbI_84C0,44375 +fontTools/varLib/interpolatableTestContourOrder.py,sha256=mHJ9Ry7Rm7H3zHDwEUQEtEIDseiUzOxjg4MveW_FSiU,3021 +fontTools/varLib/interpolatableTestStartingPoint.py,sha256=K6OYKBspim6BXc91pfLTbGLyi5XZukfMuBc6hRpENG8,4296 +fontTools/varLib/interpolate_layout.py,sha256=22VjGZuV2YiAe2MpdTf0xPVz1x2G84bcOL0vOeBpGQM,3689 +fontTools/varLib/iup.c,sha256=M-eCV04EONUWonJxOEZxVlUqw4zWN6Iy7wpvOPOpMB4,826716 +fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so,sha256=1drw6v7iIsglS9zMgUpFSVLiCikWjoEedYsJwqRzJ5M,1568136 +fontTools/varLib/iup.py,sha256=mKq_GRWuUg4yTmw2V32nu0v2r-SzzN7xS7rIbV0mYuc,14984 +fontTools/varLib/merger.py,sha256=E59oli4AwqWZ-FgnuStMSBvsB-FHe-55esXTYUqGeJ8,60802 +fontTools/varLib/models.py,sha256=sj_ENljh_qcMbfYzRIOlRgHq6tFOmL02Wv6WO8uofis,22398 +fontTools/varLib/multiVarStore.py,sha256=eQEuWNY01YF5zDpy1UwNtvOYyD6c0FLxpH-QFpX1i78,8305 +fontTools/varLib/mutator.py,sha256=YJkKFFWjwpYZ1MrC7UZYJ1BuYTGiwgi7jHnpqNpKfKg,19278 +fontTools/varLib/mvar.py,sha256=LTV77vH_3Ecg_qKBO5xQzjLOlJir_ppEr7mPVZRgad8,2449 +fontTools/varLib/plot.py,sha256=NoSZkJ5ndxNcDvJIvd5pQ9_jX6X1oM1K2G_tR4sdPVs,7494 +fontTools/varLib/stat.py,sha256=XuNKKZxGlBrl4OGFDAwVXhpBwJi23U3BdHmNTKoJnvE,4811 +fontTools/varLib/varStore.py,sha256=2QA9SDI6jQyQ_zq82OOwa3FBkfl-ksaSo1KGmVFpa9Q,24069 
+fontTools/voltLib/__init__.py,sha256=ZZ1AsTx1VlDn40Kupce-fM3meOWugy3RZraBW9LG-9M,151 +fontTools/voltLib/__main__.py,sha256=uVtABLzMeHtvKL8zetf4rpC4aB8BkYr5QLSegNjZZZI,5928 +fontTools/voltLib/ast.py,sha256=arA9W3Gqo6OqljwNNKnMAojz-C5LStbC5SgjJh7buKk,13300 +fontTools/voltLib/error.py,sha256=phcQOQj-xOspCXu9hBJQRhSOBDzxHRgZd3fWQOFNJzw,395 +fontTools/voltLib/lexer.py,sha256=OvuETOSvlS6v7iCVeJ3IdH2Cg71n3OJoEyiB3-h6vhE,3368 +fontTools/voltLib/parser.py,sha256=rkw2IHBZPsrhGVC7Kw7V501m0u52kh1JSM5HXp-xchM,25396 +fontTools/voltLib/voltToFea.py,sha256=Z2yvnaZLQXzPLT86Uta0zRsXIYgj6NnvZtSWt5xmw2s,36549 +fonttools-4.59.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +fonttools-4.59.0.dist-info/METADATA,sha256=LneEbtHYsAQHPecvKT4gNMv-qzS_AwQTJBmslrSnfvs,107891 +fonttools-4.59.0.dist-info/RECORD,, +fonttools-4.59.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fonttools-4.59.0.dist-info/WHEEL,sha256=DTnKjM5OInJxWADod3iQyWxWcdG-eRwxzGww236swpY,151 +fonttools-4.59.0.dist-info/entry_points.txt,sha256=8kVHddxfFWA44FSD4mBpmC-4uCynQnkoz_9aNJb227Y,147 +fonttools-4.59.0.dist-info/licenses/LICENSE,sha256=Z4cgj4P2Wcy8IiOy_elS_6b36KymLxqKK_W8UbsbI4M,1072 +fonttools-4.59.0.dist-info/licenses/LICENSE.external,sha256=lKg6ruBymg8wLTSsxKzsvZ1YNm8mJCkHX-VX5KVLLmk,20022 +fonttools-4.59.0.dist-info/top_level.txt,sha256=rRgRylrXzekqWOsrhygzib12pQ7WILf7UGjqEwkIFDM,10 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/REQUESTED b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/WHEEL b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..d170d6d9582d145f12244c4135d9446d597e1029 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/entry_points.txt b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..87ae781f169a63f0cf672a9050474035bfa5add4 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +fonttools = fontTools.__main__:main +pyftmerge = fontTools.merge:main +pyftsubset = fontTools.subset:main +ttx = fontTools.ttx:main diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/licenses/LICENSE b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..cc633905d333c4b42c1a0c8b34e9f734adeb6e1e --- /dev/null +++ 
b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Just van Rossum + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/licenses/LICENSE.external b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/licenses/LICENSE.external new file mode 100644 index 0000000000000000000000000000000000000000..5c45052f870940119ee6bc2b51ab3a7bc43d01d7 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/licenses/LICENSE.external @@ -0,0 +1,388 @@ +FontTools includes the following font projects for testing purposes, which are +under SIL Open Font License, Version 1.1: + +Lobster + Copyright (c) 2010, Pablo Impallari (www.impallari.com|impallari@gmail.com), + with Reserved Font Name Lobster. + This Font Software is licensed under the SIL Open Font License, Version 1.1. + +Noto Fonts + This Font Software is licensed under the SIL Open Font License, Version 1.1. + +XITS font project + Copyright (c) 2001-2010 by the STI Pub Companies, consisting of the American + Institute of Physics, the American Chemical Society, the American + Mathematical Society, the American Physical Society, Elsevier, Inc., and The + Institute of Electrical and Electronic Engineers, Inc. (www.stixfonts.org), + with Reserved Font Name STIX Fonts, STIX Fonts (TM) is a trademark of The + Institute of Electrical and Electronics Engineers, Inc. + + Portions copyright (c) 1998-2003 by MicroPress, Inc. + (www.micropress-inc.com), with Reserved Font Name TM Math. To obtain + additional mathematical fonts, please contact MicroPress, Inc., 68-30 Harrow + Street, Forest Hills, NY 11375, USA, Phone: (718) 575-1816. + + Portions copyright (c) 1990 by Elsevier, Inc. + + This Font Software is licensed under the SIL Open Font License, Version 1.1. + +Iosevka + Copyright (c) 2015-2020 Belleve Invis (belleve@typeof.net). + This Font Software is licensed under the SIL Open Font License, Version 1.1. 
+ +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font +creation efforts of academic and linguistic communities, and to +provide a free and open framework in which fonts may be shared and +improved in partnership with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply to +any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software +components as distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, +deleting, or substituting -- in part or in whole -- any of the +components of the Original Version, by changing formats or by porting +the Font Software to a new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, +modify, redistribute, and sell modified and unmodified copies of the +Font Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, in +Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the +corresponding Copyright Holder. This restriction only applies to the +primary font name as presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. 
The requirement for fonts to +remain under this license does not apply to any document created using +the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. + +===== + +FontTools includes Adobe AGL & AGLFN, which is under 3-clauses BSD license: + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +Neither the name of Adobe Systems Incorporated nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +===== + +FontTools includes cu2qu, which is Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0, a copy of which is reproduced below: + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +===== + +FontTools includes code in `fontTools.misc.filesystem` which is derived from: + +PyFilesystem2 (i.e. 
the `fs` package) by Will McGugan +Licensed under the MIT License +https://github.com/PyFilesystem/pyfilesystem2 + +Copyright (c) 2017-2021 The PyFilesystem2 contributors +Copyright (c) 2016-2019 Will McGugan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/top_level.txt b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..9af65ba39d292309497df4accdc44bd6f8143d10 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fonttools-4.59.0.dist-info/top_level.txt @@ -0,0 +1 @@ +fontTools diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..452c78a055e72a6d04f1013d1a98fda33fdc449e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/__init__.py @@ -0,0 +1,71 @@ +from . 
import caching +from ._version import __version__ # noqa: F401 +from .callbacks import Callback +from .compression import available_compressions +from .core import get_fs_token_paths, open, open_files, open_local, url_to_fs +from .exceptions import FSTimeoutError +from .mapping import FSMap, get_mapper +from .registry import ( + available_protocols, + filesystem, + get_filesystem_class, + register_implementation, + registry, +) +from .spec import AbstractFileSystem + +__all__ = [ + "AbstractFileSystem", + "FSTimeoutError", + "FSMap", + "filesystem", + "register_implementation", + "get_filesystem_class", + "get_fs_token_paths", + "get_mapper", + "open", + "open_files", + "open_local", + "registry", + "caching", + "Callback", + "available_protocols", + "available_compressions", + "url_to_fs", +] + + +def process_entries(): + try: + from importlib.metadata import entry_points + except ImportError: + return + if entry_points is not None: + try: + eps = entry_points() + except TypeError: + pass # importlib-metadata < 0.8 + else: + if hasattr(eps, "select"): # Python 3.10+ / importlib_metadata >= 3.9.0 + specs = eps.select(group="fsspec.specs") + else: + specs = eps.get("fsspec.specs", []) + registered_names = {} + for spec in specs: + err_msg = f"Unable to load filesystem from {spec}" + name = spec.name + if name in registered_names: + continue + registered_names[name] = True + register_implementation( + name, + spec.value.replace(":", "."), + errtxt=err_msg, + # We take our implementations as the ones to overload with if + # for some reason we encounter some, may be the same, already + # registered + clobber=True, + ) + + +process_entries() diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/_version.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..fa46f9e465e2ef9bf881760a6ce215414c4548eb --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/_version.py @@ -0,0 +1,21 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '2025.7.0' +__version_tuple__ = version_tuple = (2025, 7, 0) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/archive.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/archive.py new file mode 100644 index 0000000000000000000000000000000000000000..13a4da8df7c9405297cdd7d37476be2f725b2f57 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/archive.py @@ -0,0 +1,75 @@ +import operator + +from fsspec import AbstractFileSystem +from fsspec.utils import tokenize + + +class AbstractArchiveFileSystem(AbstractFileSystem): + """ + A generic superclass for implementing Archive-based filesystems. + + Currently, it is shared amongst + :class:`~fsspec.implementations.zip.ZipFileSystem`, + :class:`~fsspec.implementations.libarchive.LibArchiveFileSystem` and + :class:`~fsspec.implementations.tar.TarFileSystem`. 
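+ + Editor's example, not part of the upstream docstring: going through the + registry, e.g. ``fsspec.filesystem("zip", fo="some_archive.zip")``, returns + one such subclass, whose ``ls``/``info`` answers are served from the + ``dir_cache`` built by ``_get_dirs()``.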
+ """ + + def __str__(self): + return f"<Archive-like object {type(self).__name__} at {id(self)}>" + + __repr__ = __str__ + + def ukey(self, path): + return tokenize(path, self.fo, self.protocol) + + def _all_dirnames(self, paths): + """Returns *all* directory names for each path in paths, including intermediate + ones. + + Parameters + ---------- + paths: Iterable of path strings + """ + if len(paths) == 0: + return set() + + dirnames = {self._parent(path) for path in paths} - {self.root_marker} + return dirnames | self._all_dirnames(dirnames) + + def info(self, path, **kwargs): + self._get_dirs() + path = self._strip_protocol(path) + if path in {"", "/"} and self.dir_cache: + return {"name": "", "type": "directory", "size": 0} + if path in self.dir_cache: + return self.dir_cache[path] + elif path + "/" in self.dir_cache: + return self.dir_cache[path + "/"] + else: + raise FileNotFoundError(path) + + def ls(self, path, detail=True, **kwargs): + self._get_dirs() + paths = {} + for p, f in self.dir_cache.items(): + p = p.rstrip("/") + if "/" in p: + root = p.rsplit("/", 1)[0] + else: + root = "" + if root == path.rstrip("/"): + paths[p] = f + elif all( + (a == b) + for a, b in zip(path.split("/"), [""] + p.strip("/").split("/")) + ): + # root directory entry + ppath = p.rstrip("/").split("/", 1)[0] + if ppath not in paths: + out = {"name": ppath, "size": 0, "type": "directory"} + paths[ppath] = out + if detail: + out = sorted(paths.values(), key=operator.itemgetter("name")) + return out + else: + return sorted(paths) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/asyn.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/asyn.py new file mode 100644 index 0000000000000000000000000000000000000000..83772839450ec74a8fb37f9be323341f948e3748 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/asyn.py @@ -0,0 +1,1097 @@ +import asyncio +import asyncio.events +import functools +import inspect +import io +import numbers +import os +import re +import threading +from collections.abc import Iterable +from glob import has_magic +from typing import TYPE_CHECKING + +from .callbacks import DEFAULT_CALLBACK +from .exceptions import FSTimeoutError +from .implementations.local import LocalFileSystem, make_path_posix, trailing_sep +from .spec import AbstractBufferedFile, AbstractFileSystem +from .utils import glob_translate, is_exception, other_paths + +private = re.compile("_[^_]") +iothread = [None] # dedicated fsspec IO thread +loop = [None] # global event loop for any non-async instance +_lock = None # global lock placeholder +get_running_loop = asyncio.get_running_loop + + +def get_lock(): + """Allocate or return a threading lock. + + The lock is allocated on first use to allow setting one lock per forked process. + """ + global _lock + if not _lock: + _lock = threading.Lock() + return _lock + + +def reset_lock(): + """Reset the global lock. + + This should be called only on the init of a forked process to reset the lock to + None, enabling the new forked process to get a new lock. 
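+ + Editor's note (an assumption about intended use, not upstream text): this + is the kind of hook one would pass to + ``os.register_at_fork(after_in_child=...)`` on POSIX; the module itself + registers the related ``reset_after_fork`` helper that way further down.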
+ """ + global _lock + + iothread[0] = None + loop[0] = None + _lock = None + + +async def _runner(event, coro, result, timeout=None): + timeout = timeout if timeout else None # convert 0 or 0.0 to None + if timeout is not None: + coro = asyncio.wait_for(coro, timeout=timeout) + try: + result[0] = await coro + except Exception as ex: + result[0] = ex + finally: + event.set() + + +def sync(loop, func, *args, timeout=None, **kwargs): + """ + Make loop run coroutine until it returns. Runs in other thread + + Examples + -------- + >>> fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, + timeout=timeout, **kwargs) + """ + timeout = timeout if timeout else None # convert 0 or 0.0 to None + # NB: if the loop is not running *yet*, it is OK to submit work + # and we will wait for it + if loop is None or loop.is_closed(): + raise RuntimeError("Loop is not running") + try: + loop0 = asyncio.events.get_running_loop() + if loop0 is loop: + raise NotImplementedError("Calling sync() from within a running loop") + except NotImplementedError: + raise + except RuntimeError: + pass + coro = func(*args, **kwargs) + result = [None] + event = threading.Event() + asyncio.run_coroutine_threadsafe(_runner(event, coro, result, timeout), loop) + while True: + # this loops allows thread to get interrupted + if event.wait(1): + break + if timeout is not None: + timeout -= 1 + if timeout < 0: + raise FSTimeoutError + + return_result = result[0] + if isinstance(return_result, asyncio.TimeoutError): + # suppress asyncio.TimeoutError, raise FSTimeoutError + raise FSTimeoutError from return_result + elif isinstance(return_result, BaseException): + raise return_result + else: + return return_result + + +def sync_wrapper(func, obj=None): + """Given a function, make so can be called in blocking contexts + + Leave obj=None if defining within a class. Pass the instance if attaching + as an attribute of the instance. + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + self = obj or args[0] + return sync(self.loop, func, *args, **kwargs) + + return wrapper + + +def get_loop(): + """Create or return the default fsspec IO loop + + The loop will be running on a separate thread. 
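+ + Editor's example, mirroring the ``sync`` docstring above: + ``fsspec.asyn.sync(get_loop(), fs._info, path)`` runs a single coroutine + method on this shared loop from blocking code.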
+ """ + if loop[0] is None: + with get_lock(): + # repeat the check just in case the loop got filled between the + # previous two calls from another thread + if loop[0] is None: + loop[0] = asyncio.new_event_loop() + th = threading.Thread(target=loop[0].run_forever, name="fsspecIO") + th.daemon = True + th.start() + iothread[0] = th + return loop[0] + + +def reset_after_fork(): + global lock + loop[0] = None + iothread[0] = None + lock = None + + +if hasattr(os, "register_at_fork"): + # should be posix; this will do nothing for spawn or forkserver subprocesses + os.register_at_fork(after_in_child=reset_after_fork) + + +if TYPE_CHECKING: + import resource + + ResourceError = resource.error +else: + try: + import resource + except ImportError: + resource = None + ResourceError = OSError + else: + ResourceError = getattr(resource, "error", OSError) + +_DEFAULT_BATCH_SIZE = 128 +_NOFILES_DEFAULT_BATCH_SIZE = 1280 + + +def _get_batch_size(nofiles=False): + from fsspec.config import conf + + if nofiles: + if "nofiles_gather_batch_size" in conf: + return conf["nofiles_gather_batch_size"] + else: + if "gather_batch_size" in conf: + return conf["gather_batch_size"] + if nofiles: + return _NOFILES_DEFAULT_BATCH_SIZE + if resource is None: + return _DEFAULT_BATCH_SIZE + + try: + soft_limit, _ = resource.getrlimit(resource.RLIMIT_NOFILE) + except (ImportError, ValueError, ResourceError): + return _DEFAULT_BATCH_SIZE + + if soft_limit == resource.RLIM_INFINITY: + return -1 + else: + return soft_limit // 8 + + +def running_async() -> bool: + """Being executed by an event loop?""" + try: + asyncio.get_running_loop() + return True + except RuntimeError: + return False + + +async def _run_coros_in_chunks( + coros, + batch_size=None, + callback=DEFAULT_CALLBACK, + timeout=None, + return_exceptions=False, + nofiles=False, +): + """Run the given coroutines in chunks. + + Parameters + ---------- + coros: list of coroutines to run + batch_size: int or None + Number of coroutines to submit/wait on simultaneously. + If -1, then it will not be any throttling. If + None, it will be inferred from _get_batch_size() + callback: fsspec.callbacks.Callback instance + Gets a relative_update when each coroutine completes + timeout: number or None + If given, each coroutine times out after this time. Note that, since + there are multiple batches, the total run time of this function will in + general be longer + return_exceptions: bool + Same meaning as in asyncio.gather + nofiles: bool + If inferring the batch_size, does this operation involve local files? + If yes, you normally expect smaller batches. 
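+ + Editor's example (``fs`` and ``paths`` are hypothetical): + ``await _run_coros_in_chunks([fs._cat_file(p) for p in paths], + batch_size=16)`` keeps at most 16 coroutines in flight and returns the + results in the same order as the input list.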
+ """ + + if batch_size is None: + batch_size = _get_batch_size(nofiles=nofiles) + + if batch_size == -1: + batch_size = len(coros) + + assert batch_size > 0 + + async def _run_coro(coro, i): + try: + return await asyncio.wait_for(coro, timeout=timeout), i + except Exception as e: + if not return_exceptions: + raise + return e, i + finally: + callback.relative_update(1) + + i = 0 + n = len(coros) + results = [None] * n + pending = set() + + while pending or i < n: + while len(pending) < batch_size and i < n: + pending.add(asyncio.ensure_future(_run_coro(coros[i], i))) + i += 1 + + if not pending: + break + + done, pending = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED) + while done: + result, k = await done.pop() + results[k] = result + + return results + + +# these methods should be implemented as async by any async-able backend +async_methods = [ + "_ls", + "_cat_file", + "_get_file", + "_put_file", + "_rm_file", + "_cp_file", + "_pipe_file", + "_expand_path", + "_info", + "_isfile", + "_isdir", + "_exists", + "_walk", + "_glob", + "_find", + "_du", + "_size", + "_mkdir", + "_makedirs", +] + + +class AsyncFileSystem(AbstractFileSystem): + """Async file operations, default implementations + + Passes bulk operations to asyncio.gather for concurrent operation. + + Implementations that have concurrent batch operations and/or async methods + should inherit from this class instead of AbstractFileSystem. Docstrings are + copied from the un-underscored method in AbstractFileSystem, if not given. + """ + + # note that methods do not have docstring here; they will be copied + # for _* methods and inferred for overridden methods. + + async_impl = True + mirror_sync_methods = True + disable_throttling = False + + def __init__(self, *args, asynchronous=False, loop=None, batch_size=None, **kwargs): + self.asynchronous = asynchronous + self._pid = os.getpid() + if not asynchronous: + self._loop = loop or get_loop() + else: + self._loop = None + self.batch_size = batch_size + super().__init__(*args, **kwargs) + + @property + def loop(self): + if self._pid != os.getpid(): + raise RuntimeError("This class is not fork-safe") + return self._loop + + async def _rm_file(self, path, **kwargs): + raise NotImplementedError + + async def _rm(self, path, recursive=False, batch_size=None, **kwargs): + # TODO: implement on_error + batch_size = batch_size or self.batch_size + path = await self._expand_path(path, recursive=recursive) + return await _run_coros_in_chunks( + [self._rm_file(p, **kwargs) for p in reversed(path)], + batch_size=batch_size, + nofiles=True, + ) + + async def _cp_file(self, path1, path2, **kwargs): + raise NotImplementedError + + async def _mv_file(self, path1, path2): + await self._cp_file(path1, path2) + await self._rm_file(path1) + + async def _copy( + self, + path1, + path2, + recursive=False, + on_error=None, + maxdepth=None, + batch_size=None, + **kwargs, + ): + if on_error is None and recursive: + on_error = "ignore" + elif on_error is None: + on_error = "raise" + + if isinstance(path1, list) and isinstance(path2, list): + # No need to expand paths when both source and destination + # are provided as lists + paths1 = path1 + paths2 = path2 + else: + source_is_str = isinstance(path1, str) + paths1 = await self._expand_path( + path1, maxdepth=maxdepth, recursive=recursive + ) + if source_is_str and (not recursive or maxdepth is not None): + # Non-recursive glob does not copy directories + paths1 = [ + p for p in paths1 if not (trailing_sep(p) or await self._isdir(p)) + ] 
+ if not paths1: + return + + source_is_file = len(paths1) == 1 + dest_is_dir = isinstance(path2, str) and ( + trailing_sep(path2) or await self._isdir(path2) + ) + + exists = source_is_str and ( + (has_magic(path1) and source_is_file) + or (not has_magic(path1) and dest_is_dir and not trailing_sep(path1)) + ) + paths2 = other_paths( + paths1, + path2, + exists=exists, + flatten=not source_is_str, + ) + + batch_size = batch_size or self.batch_size + coros = [self._cp_file(p1, p2, **kwargs) for p1, p2 in zip(paths1, paths2)] + result = await _run_coros_in_chunks( + coros, batch_size=batch_size, return_exceptions=True, nofiles=True + ) + + for ex in filter(is_exception, result): + if on_error == "ignore" and isinstance(ex, FileNotFoundError): + continue + raise ex + + async def _pipe_file(self, path, value, mode="overwrite", **kwargs): + raise NotImplementedError + + async def _pipe(self, path, value=None, batch_size=None, **kwargs): + if isinstance(path, str): + path = {path: value} + batch_size = batch_size or self.batch_size + return await _run_coros_in_chunks( + [self._pipe_file(k, v, **kwargs) for k, v in path.items()], + batch_size=batch_size, + nofiles=True, + ) + + async def _process_limits(self, url, start, end): + """Helper for "Range"-based _cat_file""" + size = None + suff = False + if start is not None and start < 0: + # if start is negative and end None, end is the "suffix length" + if end is None: + end = -start + start = "" + suff = True + else: + size = size or (await self._info(url))["size"] + start = size + start + elif start is None: + start = 0 + if not suff: + if end is not None and end < 0: + if start is not None: + size = size or (await self._info(url))["size"] + end = size + end + elif end is None: + end = "" + if isinstance(end, numbers.Integral): + end -= 1 # bytes range is inclusive + return f"bytes={start}-{end}" + + async def _cat_file(self, path, start=None, end=None, **kwargs): + raise NotImplementedError + + async def _cat( + self, path, recursive=False, on_error="raise", batch_size=None, **kwargs + ): + paths = await self._expand_path(path, recursive=recursive) + coros = [self._cat_file(path, **kwargs) for path in paths] + batch_size = batch_size or self.batch_size + out = await _run_coros_in_chunks( + coros, batch_size=batch_size, nofiles=True, return_exceptions=True + ) + if on_error == "raise": + ex = next(filter(is_exception, out), False) + if ex: + raise ex + if ( + len(paths) > 1 + or isinstance(path, list) + or paths[0] != self._strip_protocol(path) + ): + return { + k: v + for k, v in zip(paths, out) + if on_error != "omit" or not is_exception(v) + } + else: + return out[0] + + async def _cat_ranges( + self, + paths, + starts, + ends, + max_gap=None, + batch_size=None, + on_error="return", + **kwargs, + ): + """Get the contents of byte ranges from one or more files + + Parameters + ---------- + paths: list + A list of of filepaths on this filesystems + starts, ends: int or list + Bytes limits of the read. If using a single int, the same value will be + used to read all the specified files. 
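+ + Editor's example (hypothetical paths): ``await fs._cat_ranges([p1, p2], + starts=0, ends=[100, 200])`` broadcasts the single start to both files + and, since ``on_error="return"``, yields either the bytes or the exception + for each read.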
+ """ + # TODO: on_error + if max_gap is not None: + # use utils.merge_offset_ranges + raise NotImplementedError + if not isinstance(paths, list): + raise TypeError + if not isinstance(starts, Iterable): + starts = [starts] * len(paths) + if not isinstance(ends, Iterable): + ends = [ends] * len(paths) + if len(starts) != len(paths) or len(ends) != len(paths): + raise ValueError + coros = [ + self._cat_file(p, start=s, end=e, **kwargs) + for p, s, e in zip(paths, starts, ends) + ] + batch_size = batch_size or self.batch_size + return await _run_coros_in_chunks( + coros, batch_size=batch_size, nofiles=True, return_exceptions=True + ) + + async def _put_file(self, lpath, rpath, mode="overwrite", **kwargs): + raise NotImplementedError + + async def _put( + self, + lpath, + rpath, + recursive=False, + callback=DEFAULT_CALLBACK, + batch_size=None, + maxdepth=None, + **kwargs, + ): + """Copy file(s) from local. + + Copies a specific file or tree of files (if recursive=True). If rpath + ends with a "/", it will be assumed to be a directory, and target files + will go within. + + The put_file method will be called concurrently on a batch of files. The + batch_size option can configure the amount of futures that can be executed + at the same time. If it is -1, then all the files will be uploaded concurrently. + The default can be set for this instance by passing "batch_size" in the + constructor, or for all instances by setting the "gather_batch_size" key + in ``fsspec.config.conf``, falling back to 1/8th of the system limit . + """ + if isinstance(lpath, list) and isinstance(rpath, list): + # No need to expand paths when both source and destination + # are provided as lists + rpaths = rpath + lpaths = lpath + else: + source_is_str = isinstance(lpath, str) + if source_is_str: + lpath = make_path_posix(lpath) + fs = LocalFileSystem() + lpaths = fs.expand_path(lpath, recursive=recursive, maxdepth=maxdepth) + if source_is_str and (not recursive or maxdepth is not None): + # Non-recursive glob does not copy directories + lpaths = [p for p in lpaths if not (trailing_sep(p) or fs.isdir(p))] + if not lpaths: + return + + source_is_file = len(lpaths) == 1 + dest_is_dir = isinstance(rpath, str) and ( + trailing_sep(rpath) or await self._isdir(rpath) + ) + + rpath = self._strip_protocol(rpath) + exists = source_is_str and ( + (has_magic(lpath) and source_is_file) + or (not has_magic(lpath) and dest_is_dir and not trailing_sep(lpath)) + ) + rpaths = other_paths( + lpaths, + rpath, + exists=exists, + flatten=not source_is_str, + ) + + is_dir = {l: os.path.isdir(l) for l in lpaths} + rdirs = [r for l, r in zip(lpaths, rpaths) if is_dir[l]] + file_pairs = [(l, r) for l, r in zip(lpaths, rpaths) if not is_dir[l]] + + await asyncio.gather(*[self._makedirs(d, exist_ok=True) for d in rdirs]) + batch_size = batch_size or self.batch_size + + coros = [] + callback.set_size(len(file_pairs)) + for lfile, rfile in file_pairs: + put_file = callback.branch_coro(self._put_file) + coros.append(put_file(lfile, rfile, **kwargs)) + + return await _run_coros_in_chunks( + coros, batch_size=batch_size, callback=callback + ) + + async def _get_file(self, rpath, lpath, **kwargs): + raise NotImplementedError + + async def _get( + self, + rpath, + lpath, + recursive=False, + callback=DEFAULT_CALLBACK, + maxdepth=None, + **kwargs, + ): + """Copy file(s) to local. + + Copies a specific file or tree of files (if recursive=True). If lpath + ends with a "/", it will be assumed to be a directory, and target files + will go within. 
Can submit a list of paths, which may be glob-patterns + and will be expanded. + + The get_file method will be called concurrently on a batch of files. The + batch_size option can configure the amount of futures that can be executed + at the same time. If it is -1, then all the files will be uploaded concurrently. + The default can be set for this instance by passing "batch_size" in the + constructor, or for all instances by setting the "gather_batch_size" key + in ``fsspec.config.conf``, falling back to 1/8th of the system limit . + """ + if isinstance(lpath, list) and isinstance(rpath, list): + # No need to expand paths when both source and destination + # are provided as lists + rpaths = rpath + lpaths = lpath + else: + source_is_str = isinstance(rpath, str) + # First check for rpath trailing slash as _strip_protocol removes it. + source_not_trailing_sep = source_is_str and not trailing_sep(rpath) + rpath = self._strip_protocol(rpath) + rpaths = await self._expand_path( + rpath, recursive=recursive, maxdepth=maxdepth + ) + if source_is_str and (not recursive or maxdepth is not None): + # Non-recursive glob does not copy directories + rpaths = [ + p for p in rpaths if not (trailing_sep(p) or await self._isdir(p)) + ] + if not rpaths: + return + + lpath = make_path_posix(lpath) + source_is_file = len(rpaths) == 1 + dest_is_dir = isinstance(lpath, str) and ( + trailing_sep(lpath) or LocalFileSystem().isdir(lpath) + ) + + exists = source_is_str and ( + (has_magic(rpath) and source_is_file) + or (not has_magic(rpath) and dest_is_dir and source_not_trailing_sep) + ) + lpaths = other_paths( + rpaths, + lpath, + exists=exists, + flatten=not source_is_str, + ) + + [os.makedirs(os.path.dirname(lp), exist_ok=True) for lp in lpaths] + batch_size = kwargs.pop("batch_size", self.batch_size) + + coros = [] + callback.set_size(len(lpaths)) + for lpath, rpath in zip(lpaths, rpaths): + get_file = callback.branch_coro(self._get_file) + coros.append(get_file(rpath, lpath, **kwargs)) + return await _run_coros_in_chunks( + coros, batch_size=batch_size, callback=callback + ) + + async def _isfile(self, path): + try: + return (await self._info(path))["type"] == "file" + except: # noqa: E722 + return False + + async def _isdir(self, path): + try: + return (await self._info(path))["type"] == "directory" + except OSError: + return False + + async def _size(self, path): + return (await self._info(path)).get("size", None) + + async def _sizes(self, paths, batch_size=None): + batch_size = batch_size or self.batch_size + return await _run_coros_in_chunks( + [self._size(p) for p in paths], batch_size=batch_size + ) + + async def _exists(self, path, **kwargs): + try: + await self._info(path, **kwargs) + return True + except FileNotFoundError: + return False + + async def _info(self, path, **kwargs): + raise NotImplementedError + + async def _ls(self, path, detail=True, **kwargs): + raise NotImplementedError + + async def _walk(self, path, maxdepth=None, on_error="omit", **kwargs): + if maxdepth is not None and maxdepth < 1: + raise ValueError("maxdepth must be at least 1") + + path = self._strip_protocol(path) + full_dirs = {} + dirs = {} + files = {} + + detail = kwargs.pop("detail", False) + try: + listing = await self._ls(path, detail=True, **kwargs) + except (FileNotFoundError, OSError) as e: + if on_error == "raise": + raise + elif callable(on_error): + on_error(e) + if detail: + yield path, {}, {} + else: + yield path, [], [] + return + + for info in listing: + # each info name must be at least [path]/part , but 
here
+            # we check also for names like [path]/part/
+            pathname = info["name"].rstrip("/")
+            name = pathname.rsplit("/", 1)[-1]
+            if info["type"] == "directory" and pathname != path:
+                # do not include "self" path
+                full_dirs[name] = pathname
+                dirs[name] = info
+            elif pathname == path:
+                # file-like with same name as given path
+                files[""] = info
+            else:
+                files[name] = info
+
+        if detail:
+            yield path, dirs, files
+        else:
+            yield path, list(dirs), list(files)
+
+        if maxdepth is not None:
+            maxdepth -= 1
+            if maxdepth < 1:
+                return
+
+        for d in dirs:
+            async for _ in self._walk(
+                full_dirs[d], maxdepth=maxdepth, detail=detail, **kwargs
+            ):
+                yield _
+
+    async def _glob(self, path, maxdepth=None, **kwargs):
+        if maxdepth is not None and maxdepth < 1:
+            raise ValueError("maxdepth must be at least 1")
+
+        import re
+
+        seps = (os.path.sep, os.path.altsep) if os.path.altsep else (os.path.sep,)
+        ends_with_sep = path.endswith(seps)  # _strip_protocol strips trailing slash
+        path = self._strip_protocol(path)
+        append_slash_to_dirname = ends_with_sep or path.endswith(
+            tuple(sep + "**" for sep in seps)
+        )
+        idx_star = path.find("*") if path.find("*") >= 0 else len(path)
+        idx_qmark = path.find("?") if path.find("?") >= 0 else len(path)
+        idx_brace = path.find("[") if path.find("[") >= 0 else len(path)
+
+        min_idx = min(idx_star, idx_qmark, idx_brace)
+
+        detail = kwargs.pop("detail", False)
+
+        if not has_magic(path):
+            if await self._exists(path, **kwargs):
+                if not detail:
+                    return [path]
+                else:
+                    return {path: await self._info(path, **kwargs)}
+            else:
+                if not detail:
+                    return []  # glob of non-existent returns empty
+                else:
+                    return {}
+        elif "/" in path[:min_idx]:
+            min_idx = path[:min_idx].rindex("/")
+            root = path[: min_idx + 1]
+            depth = path[min_idx + 1 :].count("/") + 1
+        else:
+            root = ""
+            depth = path[min_idx + 1 :].count("/") + 1
+
+        if "**" in path:
+            if maxdepth is not None:
+                idx_double_stars = path.find("**")
+                depth_double_stars = path[idx_double_stars:].count("/") + 1
+                depth = depth - depth_double_stars + maxdepth
+            else:
+                depth = None
+
+        allpaths = await self._find(
+            root, maxdepth=depth, withdirs=True, detail=True, **kwargs
+        )
+
+        pattern = glob_translate(path + ("/" if ends_with_sep else ""))
+        pattern = re.compile(pattern)
+
+        out = {
+            p: info
+            for p, info in sorted(allpaths.items())
+            if pattern.match(
+                p + "/"
+                if append_slash_to_dirname and info["type"] == "directory"
+                else p
+            )
+        }
+
+        if detail:
+            return out
+        else:
+            return list(out)
+
+    async def _du(self, path, total=True, maxdepth=None, **kwargs):
+        sizes = {}
+        # async for?
+        for f in await self._find(path, maxdepth=maxdepth, **kwargs):
+            info = await self._info(f)
+            sizes[info["name"]] = info["size"]
+        if total:
+            return sum(sizes.values())
+        else:
+            return sizes
+
+    async def _find(self, path, maxdepth=None, withdirs=False, **kwargs):
+        path = self._strip_protocol(path)
+        out = {}
+        detail = kwargs.pop("detail", False)
+
+        # Add the root directory if withdirs is requested
+        # This is needed for posix glob compliance
+        if withdirs and path != "" and await self._isdir(path):
+            out[path] = await self._info(path)
+
+        # async for?
+ async for _, dirs, files in self._walk(path, maxdepth, detail=True, **kwargs): + if withdirs: + files.update(dirs) + out.update({info["name"]: info for name, info in files.items()}) + if not out and (await self._isfile(path)): + # walk works on directories, but find should also return [path] + # when path happens to be a file + out[path] = {} + names = sorted(out) + if not detail: + return names + else: + return {name: out[name] for name in names} + + async def _expand_path(self, path, recursive=False, maxdepth=None): + if maxdepth is not None and maxdepth < 1: + raise ValueError("maxdepth must be at least 1") + + if isinstance(path, str): + out = await self._expand_path([path], recursive, maxdepth) + else: + out = set() + path = [self._strip_protocol(p) for p in path] + for p in path: # can gather here + if has_magic(p): + bit = set(await self._glob(p, maxdepth=maxdepth)) + out |= bit + if recursive: + # glob call above expanded one depth so if maxdepth is defined + # then decrement it in expand_path call below. If it is zero + # after decrementing then avoid expand_path call. + if maxdepth is not None and maxdepth <= 1: + continue + out |= set( + await self._expand_path( + list(bit), + recursive=recursive, + maxdepth=maxdepth - 1 if maxdepth is not None else None, + ) + ) + continue + elif recursive: + rec = set(await self._find(p, maxdepth=maxdepth, withdirs=True)) + out |= rec + if p not in out and (recursive is False or (await self._exists(p))): + # should only check once, for the root + out.add(p) + if not out: + raise FileNotFoundError(path) + return sorted(out) + + async def _mkdir(self, path, create_parents=True, **kwargs): + pass # not necessary to implement, may not have directories + + async def _makedirs(self, path, exist_ok=False): + pass # not necessary to implement, may not have directories + + async def open_async(self, path, mode="rb", **kwargs): + if "b" not in mode or kwargs.get("compression"): + raise ValueError + raise NotImplementedError + + +def mirror_sync_methods(obj): + """Populate sync and async methods for obj + + For each method will create a sync version if the name refers to an async method + (coroutine) and there is no override in the child class; will create an async + method for the corresponding sync method if there is no implementation. 
+ + Uses the methods specified in + - async_methods: the set that an implementation is expected to provide + - default_async_methods: that can be derived from their sync version in + AbstractFileSystem + - AsyncFileSystem: async-specific default coroutines + """ + from fsspec import AbstractFileSystem + + for method in async_methods + dir(AsyncFileSystem): + if not method.startswith("_"): + continue + smethod = method[1:] + if private.match(method): + isco = inspect.iscoroutinefunction(getattr(obj, method, None)) + unsync = getattr(getattr(obj, smethod, False), "__func__", None) + is_default = unsync is getattr(AbstractFileSystem, smethod, "") + if isco and is_default: + mth = sync_wrapper(getattr(obj, method), obj=obj) + setattr(obj, smethod, mth) + if not mth.__doc__: + mth.__doc__ = getattr( + getattr(AbstractFileSystem, smethod, None), "__doc__", "" + ) + + +class FSSpecCoroutineCancel(Exception): + pass + + +def _dump_running_tasks( + printout=True, cancel=True, exc=FSSpecCoroutineCancel, with_task=False +): + import traceback + + tasks = [t for t in asyncio.tasks.all_tasks(loop[0]) if not t.done()] + if printout: + [task.print_stack() for task in tasks] + out = [ + { + "locals": task._coro.cr_frame.f_locals, + "file": task._coro.cr_frame.f_code.co_filename, + "firstline": task._coro.cr_frame.f_code.co_firstlineno, + "linelo": task._coro.cr_frame.f_lineno, + "stack": traceback.format_stack(task._coro.cr_frame), + "task": task if with_task else None, + } + for task in tasks + ] + if cancel: + for t in tasks: + cbs = t._callbacks + t.cancel() + asyncio.futures.Future.set_exception(t, exc) + asyncio.futures.Future.cancel(t) + [cb[0](t) for cb in cbs] # cancels any dependent concurrent.futures + try: + t._coro.throw(exc) # exits coro, unless explicitly handled + except exc: + pass + return out + + +class AbstractAsyncStreamedFile(AbstractBufferedFile): + # no read buffering, and always auto-commit + # TODO: readahead might still be useful here, but needs async version + + async def read(self, length=-1): + """ + Return data from cache, or fetch pieces as necessary + + Parameters + ---------- + length: int (-1) + Number of bytes to read; if <0, all remaining bytes. + """ + length = -1 if length is None else int(length) + if self.mode != "rb": + raise ValueError("File not in read mode") + if length < 0: + length = self.size - self.loc + if self.closed: + raise ValueError("I/O operation on closed file.") + if length == 0: + # don't even bother calling fetch + return b"" + out = await self._fetch_range(self.loc, self.loc + length) + self.loc += len(out) + return out + + async def write(self, data): + """ + Write data to buffer. + + Buffer only sent on flush() or if buffer is greater than + or equal to blocksize. + + Parameters + ---------- + data: bytes + Set of bytes to be written. 
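+
+        Returns
+        -------
+        int
+            The number of bytes written to the internal buffer; the buffered
+            data is only uploaded once it reaches ``blocksize`` (or on close).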
+ """ + if self.mode not in {"wb", "ab"}: + raise ValueError("File not in write mode") + if self.closed: + raise ValueError("I/O operation on closed file.") + if self.forced: + raise ValueError("This file has been force-flushed, can only close") + out = self.buffer.write(data) + self.loc += out + if self.buffer.tell() >= self.blocksize: + await self.flush() + return out + + async def close(self): + """Close file + + Finalizes writes, discards cache + """ + if getattr(self, "_unclosable", False): + return + if self.closed: + return + if self.mode == "rb": + self.cache = None + else: + if not self.forced: + await self.flush(force=True) + + if self.fs is not None: + self.fs.invalidate_cache(self.path) + self.fs.invalidate_cache(self.fs._parent(self.path)) + + self.closed = True + + async def flush(self, force=False): + if self.closed: + raise ValueError("Flush on closed file") + if force and self.forced: + raise ValueError("Force flush cannot be called more than once") + if force: + self.forced = True + + if self.mode not in {"wb", "ab"}: + # no-op to flush on read-mode + return + + if not force and self.buffer.tell() < self.blocksize: + # Defer write on small block + return + + if self.offset is None: + # Initialize a multipart upload + self.offset = 0 + try: + await self._initiate_upload() + except: + self.closed = True + raise + + if await self._upload_chunk(final=force) is not False: + self.offset += self.buffer.seek(0, 2) + self.buffer = io.BytesIO() + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + + async def _fetch_range(self, start, end): + raise NotImplementedError + + async def _initiate_upload(self): + pass + + async def _upload_chunk(self, final=False): + raise NotImplementedError diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/caching.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/caching.py new file mode 100644 index 0000000000000000000000000000000000000000..de6a4e3407a62a1c2123bf2b4a81afb96c54150a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/caching.py @@ -0,0 +1,1004 @@ +from __future__ import annotations + +import collections +import functools +import logging +import math +import os +import threading +import warnings +from collections import OrderedDict +from concurrent.futures import Future, ThreadPoolExecutor +from itertools import groupby +from operator import itemgetter +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Generic, + NamedTuple, + TypeVar, +) + +if TYPE_CHECKING: + import mmap + + from typing_extensions import ParamSpec + + P = ParamSpec("P") +else: + P = TypeVar("P") + +T = TypeVar("T") + + +logger = logging.getLogger("fsspec") + +Fetcher = Callable[[int, int], bytes] # Maps (start, end) to bytes +MultiFetcher = Callable[[list[int, int]], bytes] # Maps [(start, end)] to bytes + + +class BaseCache: + """Pass-though cache: doesn't keep anything, calls every time + + Acts as base class for other cachers + + Parameters + ---------- + blocksize: int + How far to read ahead in numbers of bytes + fetcher: func + Function of the form f(start, end) which gets bytes from remote as + specified + size: int + How big this file is + """ + + name: ClassVar[str] = "none" + + def __init__(self, blocksize: int, fetcher: Fetcher, size: int) -> None: + self.blocksize = blocksize + self.nblocks = 0 + self.fetcher = fetcher + 
self.size = size + self.hit_count = 0 + self.miss_count = 0 + # the bytes that we actually requested + self.total_requested_bytes = 0 + + def _fetch(self, start: int | None, stop: int | None) -> bytes: + if start is None: + start = 0 + if stop is None: + stop = self.size + if start >= self.size or start >= stop: + return b"" + return self.fetcher(start, stop) + + def _reset_stats(self) -> None: + """Reset hit and miss counts for a more ganular report e.g. by file.""" + self.hit_count = 0 + self.miss_count = 0 + self.total_requested_bytes = 0 + + def _log_stats(self) -> str: + """Return a formatted string of the cache statistics.""" + if self.hit_count == 0 and self.miss_count == 0: + # a cache that does nothing, this is for logs only + return "" + return f" , {self.name}: {self.hit_count} hits, {self.miss_count} misses, {self.total_requested_bytes} total requested bytes" + + def __repr__(self) -> str: + # TODO: use rich for better formatting + return f""" + <{self.__class__.__name__}: + block size : {self.blocksize} + block count : {self.nblocks} + file size : {self.size} + cache hits : {self.hit_count} + cache misses: {self.miss_count} + total requested bytes: {self.total_requested_bytes}> + """ + + +class MMapCache(BaseCache): + """memory-mapped sparse file cache + + Opens temporary file, which is filled blocks-wise when data is requested. + Ensure there is enough disc space in the temporary location. + + This cache method might only work on posix + + Parameters + ---------- + blocksize: int + How far to read ahead in numbers of bytes + fetcher: Fetcher + Function of the form f(start, end) which gets bytes from remote as + specified + size: int + How big this file is + location: str + Where to create the temporary file. If None, a temporary file is + created using tempfile.TemporaryFile(). + blocks: set[int] + Set of block numbers that have already been fetched. If None, an empty + set is created. + multi_fetcher: MultiFetcher + Function of the form f([(start, end)]) which gets bytes from remote + as specified. This function is used to fetch multiple blocks at once. + If not specified, the fetcher function is used instead. + """ + + name = "mmap" + + def __init__( + self, + blocksize: int, + fetcher: Fetcher, + size: int, + location: str | None = None, + blocks: set[int] | None = None, + multi_fetcher: MultiFetcher | None = None, + ) -> None: + super().__init__(blocksize, fetcher, size) + self.blocks = set() if blocks is None else blocks + self.location = location + self.multi_fetcher = multi_fetcher + self.cache = self._makefile() + + def _makefile(self) -> mmap.mmap | bytearray: + import mmap + import tempfile + + if self.size == 0: + return bytearray() + + # posix version + if self.location is None or not os.path.exists(self.location): + if self.location is None: + fd = tempfile.TemporaryFile() + self.blocks = set() + else: + fd = open(self.location, "wb+") + fd.seek(self.size - 1) + fd.write(b"1") + fd.flush() + else: + fd = open(self.location, "r+b") + + return mmap.mmap(fd.fileno(), self.size) + + def _fetch(self, start: int | None, end: int | None) -> bytes: + logger.debug(f"MMap cache fetching {start}-{end}") + if start is None: + start = 0 + if end is None: + end = self.size + if start >= self.size or start >= end: + return b"" + start_block = start // self.blocksize + end_block = end // self.blocksize + block_range = range(start_block, end_block + 1) + # Determine which blocks need to be fetched. This sequence is sorted by construction. 
+ need = (i for i in block_range if i not in self.blocks) + # Count the number of blocks already cached + self.hit_count += sum(1 for i in block_range if i in self.blocks) + + ranges = [] + + # Consolidate needed blocks. + # Algorithm adapted from Python 2.x itertools documentation. + # We are grouping an enumerated sequence of blocks. By comparing when the difference + # between an ascending range (provided by enumerate) and the needed block numbers + # we can detect when the block number skips values. The key computes this difference. + # Whenever the difference changes, we know that we have previously cached block(s), + # and a new group is started. In other words, this algorithm neatly groups + # runs of consecutive block numbers so they can be fetched together. + for _, _blocks in groupby(enumerate(need), key=lambda x: x[0] - x[1]): + # Extract the blocks from the enumerated sequence + _blocks = tuple(map(itemgetter(1), _blocks)) + # Compute start of first block + sstart = _blocks[0] * self.blocksize + # Compute the end of the last block. Last block may not be full size. + send = min(_blocks[-1] * self.blocksize + self.blocksize, self.size) + + # Fetch bytes (could be multiple consecutive blocks) + self.total_requested_bytes += send - sstart + logger.debug( + f"MMap get blocks {_blocks[0]}-{_blocks[-1]} ({sstart}-{send})" + ) + ranges.append((sstart, send)) + + # Update set of cached blocks + self.blocks.update(_blocks) + # Update cache statistics with number of blocks we had to cache + self.miss_count += len(_blocks) + + if not ranges: + return self.cache[start:end] + + if self.multi_fetcher: + logger.debug(f"MMap get blocks {ranges}") + for idx, r in enumerate(self.multi_fetcher(ranges)): + (sstart, send) = ranges[idx] + logger.debug(f"MMap copy block ({sstart}-{send}") + self.cache[sstart:send] = r + else: + for sstart, send in ranges: + logger.debug(f"MMap get block ({sstart}-{send}") + self.cache[sstart:send] = self.fetcher(sstart, send) + + return self.cache[start:end] + + def __getstate__(self) -> dict[str, Any]: + state = self.__dict__.copy() + # Remove the unpicklable entries. + del state["cache"] + return state + + def __setstate__(self, state: dict[str, Any]) -> None: + # Restore instance attributes + self.__dict__.update(state) + self.cache = self._makefile() + + +class ReadAheadCache(BaseCache): + """Cache which reads only when we get beyond a block of data + + This is a much simpler version of BytesCache, and does not attempt to + fill holes in the cache or keep fragments alive. It is best suited to + many small reads in a sequential order (e.g., reading lines from a file). 
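+
+    Examples
+    --------
+    A minimal sketch against an in-memory "remote" (the data and blocksize
+    here are hypothetical):
+
+    >>> data = b"0123456789"
+    >>> cache = ReadAheadCache(4, lambda s, e: data[s:e], len(data))
+    >>> cache._fetch(0, 2)  # reads ahead by a blocksize beyond the request
+    b'01'
+    >>> cache._fetch(2, 4)  # now served from the cached read-ahead block
+    b'23'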
+ """ + + name = "readahead" + + def __init__(self, blocksize: int, fetcher: Fetcher, size: int) -> None: + super().__init__(blocksize, fetcher, size) + self.cache = b"" + self.start = 0 + self.end = 0 + + def _fetch(self, start: int | None, end: int | None) -> bytes: + if start is None: + start = 0 + if end is None or end > self.size: + end = self.size + if start >= self.size or start >= end: + return b"" + l = end - start + if start >= self.start and end <= self.end: + # cache hit + self.hit_count += 1 + return self.cache[start - self.start : end - self.start] + elif self.start <= start < self.end: + # partial hit + self.miss_count += 1 + part = self.cache[start - self.start :] + l -= len(part) + start = self.end + else: + # miss + self.miss_count += 1 + part = b"" + end = min(self.size, end + self.blocksize) + self.total_requested_bytes += end - start + self.cache = self.fetcher(start, end) # new block replaces old + self.start = start + self.end = self.start + len(self.cache) + return part + self.cache[:l] + + +class FirstChunkCache(BaseCache): + """Caches the first block of a file only + + This may be useful for file types where the metadata is stored in the header, + but is randomly accessed. + """ + + name = "first" + + def __init__(self, blocksize: int, fetcher: Fetcher, size: int) -> None: + if blocksize > size: + # this will buffer the whole thing + blocksize = size + super().__init__(blocksize, fetcher, size) + self.cache: bytes | None = None + + def _fetch(self, start: int | None, end: int | None) -> bytes: + start = start or 0 + if start > self.size: + logger.debug("FirstChunkCache: requested start > file size") + return b"" + + end = min(end, self.size) + + if start < self.blocksize: + if self.cache is None: + self.miss_count += 1 + if end > self.blocksize: + self.total_requested_bytes += end + data = self.fetcher(0, end) + self.cache = data[: self.blocksize] + return data[start:] + self.cache = self.fetcher(0, self.blocksize) + self.total_requested_bytes += self.blocksize + part = self.cache[start:end] + if end > self.blocksize: + self.total_requested_bytes += end - self.blocksize + part += self.fetcher(self.blocksize, end) + self.hit_count += 1 + return part + else: + self.miss_count += 1 + self.total_requested_bytes += end - start + return self.fetcher(start, end) + + +class BlockCache(BaseCache): + """ + Cache holding memory as a set of blocks. + + Requests are only ever made ``blocksize`` at a time, and are + stored in an LRU cache. The least recently accessed block is + discarded when more than ``maxblocks`` are stored. + + Parameters + ---------- + blocksize : int + The number of bytes to store in each block. + Requests are only ever made for ``blocksize``, so this + should balance the overhead of making a request against + the granularity of the blocks. + fetcher : Callable + size : int + The total size of the file being cached. + maxblocks : int + The maximum number of blocks to cache for. The maximum memory + use for this cache is then ``blocksize * maxblocks``. + """ + + name = "blockcache" + + def __init__( + self, blocksize: int, fetcher: Fetcher, size: int, maxblocks: int = 32 + ) -> None: + super().__init__(blocksize, fetcher, size) + self.nblocks = math.ceil(size / blocksize) + self.maxblocks = maxblocks + self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block) + + def cache_info(self): + """ + The statistics on the block cache. + + Returns + ------- + NamedTuple + Returned directly from the LRU Cache used internally. 
+ """ + return self._fetch_block_cached.cache_info() + + def __getstate__(self) -> dict[str, Any]: + state = self.__dict__ + del state["_fetch_block_cached"] + return state + + def __setstate__(self, state: dict[str, Any]) -> None: + self.__dict__.update(state) + self._fetch_block_cached = functools.lru_cache(state["maxblocks"])( + self._fetch_block + ) + + def _fetch(self, start: int | None, end: int | None) -> bytes: + if start is None: + start = 0 + if end is None: + end = self.size + if start >= self.size or start >= end: + return b"" + + # byte position -> block numbers + start_block_number = start // self.blocksize + end_block_number = end // self.blocksize + + # these are cached, so safe to do multiple calls for the same start and end. + for block_number in range(start_block_number, end_block_number + 1): + self._fetch_block_cached(block_number) + + return self._read_cache( + start, + end, + start_block_number=start_block_number, + end_block_number=end_block_number, + ) + + def _fetch_block(self, block_number: int) -> bytes: + """ + Fetch the block of data for `block_number`. + """ + if block_number > self.nblocks: + raise ValueError( + f"'block_number={block_number}' is greater than " + f"the number of blocks ({self.nblocks})" + ) + + start = block_number * self.blocksize + end = start + self.blocksize + self.total_requested_bytes += end - start + self.miss_count += 1 + logger.info("BlockCache fetching block %d", block_number) + block_contents = super()._fetch(start, end) + return block_contents + + def _read_cache( + self, start: int, end: int, start_block_number: int, end_block_number: int + ) -> bytes: + """ + Read from our block cache. + + Parameters + ---------- + start, end : int + The start and end byte positions. + start_block_number, end_block_number : int + The start and end block numbers. + """ + start_pos = start % self.blocksize + end_pos = end % self.blocksize + + self.hit_count += 1 + if start_block_number == end_block_number: + block: bytes = self._fetch_block_cached(start_block_number) + return block[start_pos:end_pos] + + else: + # read from the initial + out = [self._fetch_block_cached(start_block_number)[start_pos:]] + + # intermediate blocks + # Note: it'd be nice to combine these into one big request. However + # that doesn't play nicely with our LRU cache. + out.extend( + map( + self._fetch_block_cached, + range(start_block_number + 1, end_block_number), + ) + ) + + # final block + out.append(self._fetch_block_cached(end_block_number)[:end_pos]) + + return b"".join(out) + + +class BytesCache(BaseCache): + """Cache which holds data in a in-memory bytes object + + Implements read-ahead by the block size, for semi-random reads progressing + through the file. + + Parameters + ---------- + trim: bool + As we read more data, whether to discard the start of the buffer when + we are more than a blocksize ahead of it. + """ + + name: ClassVar[str] = "bytes" + + def __init__( + self, blocksize: int, fetcher: Fetcher, size: int, trim: bool = True + ) -> None: + super().__init__(blocksize, fetcher, size) + self.cache = b"" + self.start: int | None = None + self.end: int | None = None + self.trim = trim + + def _fetch(self, start: int | None, end: int | None) -> bytes: + # TODO: only set start/end after fetch, in case it fails? + # is this where retry logic might go? 
+ if start is None: + start = 0 + if end is None: + end = self.size + if start >= self.size or start >= end: + return b"" + if ( + self.start is not None + and start >= self.start + and self.end is not None + and end < self.end + ): + # cache hit: we have all the required data + offset = start - self.start + self.hit_count += 1 + return self.cache[offset : offset + end - start] + + if self.blocksize: + bend = min(self.size, end + self.blocksize) + else: + bend = end + + if bend == start or start > self.size: + return b"" + + if (self.start is None or start < self.start) and ( + self.end is None or end > self.end + ): + # First read, or extending both before and after + self.total_requested_bytes += bend - start + self.miss_count += 1 + self.cache = self.fetcher(start, bend) + self.start = start + else: + assert self.start is not None + assert self.end is not None + self.miss_count += 1 + + if start < self.start: + if self.end is None or self.end - end > self.blocksize: + self.total_requested_bytes += bend - start + self.cache = self.fetcher(start, bend) + self.start = start + else: + self.total_requested_bytes += self.start - start + new = self.fetcher(start, self.start) + self.start = start + self.cache = new + self.cache + elif self.end is not None and bend > self.end: + if self.end > self.size: + pass + elif end - self.end > self.blocksize: + self.total_requested_bytes += bend - start + self.cache = self.fetcher(start, bend) + self.start = start + else: + self.total_requested_bytes += bend - self.end + new = self.fetcher(self.end, bend) + self.cache = self.cache + new + + self.end = self.start + len(self.cache) + offset = start - self.start + out = self.cache[offset : offset + end - start] + if self.trim: + num = (self.end - self.start) // (self.blocksize + 1) + if num > 1: + self.start += self.blocksize * num + self.cache = self.cache[self.blocksize * num :] + return out + + def __len__(self) -> int: + return len(self.cache) + + +class AllBytes(BaseCache): + """Cache entire contents of the file""" + + name: ClassVar[str] = "all" + + def __init__( + self, + blocksize: int | None = None, + fetcher: Fetcher | None = None, + size: int | None = None, + data: bytes | None = None, + ) -> None: + super().__init__(blocksize, fetcher, size) # type: ignore[arg-type] + if data is None: + self.miss_count += 1 + self.total_requested_bytes += self.size + data = self.fetcher(0, self.size) + self.data = data + + def _fetch(self, start: int | None, stop: int | None) -> bytes: + self.hit_count += 1 + return self.data[start:stop] + + +class KnownPartsOfAFile(BaseCache): + """ + Cache holding known file parts. + + Parameters + ---------- + blocksize: int + How far to read ahead in numbers of bytes + fetcher: func + Function of the form f(start, end) which gets bytes from remote as + specified + size: int + How big this file is + data: dict + A dictionary mapping explicit `(start, stop)` file-offset tuples + with known bytes. + strict: bool, default True + Whether to fetch reads that go beyond a known byte-range boundary. + If `False`, any read that ends outside a known part will be zero + padded. Note that zero padding will not be used for reads that + begin outside a known byte-range. 
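+
+    Examples
+    --------
+    A minimal sketch with pre-seeded parts and no fetcher (values are
+    hypothetical); reads inside a known range are served from memory:
+
+    >>> kp = KnownPartsOfAFile(
+    ...     blocksize=0, fetcher=None, size=10, data={(0, 5): b"hello"}
+    ... )
+    >>> kp._fetch(0, 5)
+    b'hello'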
+ """ + + name: ClassVar[str] = "parts" + + def __init__( + self, + blocksize: int, + fetcher: Fetcher, + size: int, + data: dict[tuple[int, int], bytes] | None = None, + strict: bool = True, + **_: Any, + ): + super().__init__(blocksize, fetcher, size) + self.strict = strict + + # simple consolidation of contiguous blocks + if data: + old_offsets = sorted(data.keys()) + offsets = [old_offsets[0]] + blocks = [data.pop(old_offsets[0])] + for start, stop in old_offsets[1:]: + start0, stop0 = offsets[-1] + if start == stop0: + offsets[-1] = (start0, stop) + blocks[-1] += data.pop((start, stop)) + else: + offsets.append((start, stop)) + blocks.append(data.pop((start, stop))) + + self.data = dict(zip(offsets, blocks)) + else: + self.data = {} + + def _fetch(self, start: int | None, stop: int | None) -> bytes: + if start is None: + start = 0 + if stop is None: + stop = self.size + + out = b"" + for (loc0, loc1), data in self.data.items(): + # If self.strict=False, use zero-padded data + # for reads beyond the end of a "known" buffer + if loc0 <= start < loc1: + off = start - loc0 + out = data[off : off + stop - start] + if not self.strict or loc0 <= stop <= loc1: + # The request is within a known range, or + # it begins within a known range, and we + # are allowed to pad reads beyond the + # buffer with zero + out += b"\x00" * (stop - start - len(out)) + self.hit_count += 1 + return out + else: + # The request ends outside a known range, + # and we are being "strict" about reads + # beyond the buffer + start = loc1 + break + + # We only get here if there is a request outside the + # known parts of the file. In an ideal world, this + # should never happen + if self.fetcher is None: + # We cannot fetch the data, so raise an error + raise ValueError(f"Read is outside the known file parts: {(start, stop)}. ") + # We can fetch the data, but should warn the user + # that this may be slow + warnings.warn( + f"Read is outside the known file parts: {(start, stop)}. " + f"IO/caching performance may be poor!" 
+        )
+        logger.debug(f"KnownPartsOfAFile cache fetching {start}-{stop}")
+        self.total_requested_bytes += stop - start
+        self.miss_count += 1
+        return out + super()._fetch(start, stop)
+
+
+class UpdatableLRU(Generic[P, T]):
+    """
+    Custom implementation of LRU cache that allows updating keys
+
+    Used by BackgroundBlockCache
+    """
+
+    class CacheInfo(NamedTuple):
+        hits: int
+        misses: int
+        maxsize: int
+        currsize: int
+
+    def __init__(self, func: Callable[P, T], max_size: int = 128) -> None:
+        self._cache: OrderedDict[Any, T] = collections.OrderedDict()
+        self._func = func
+        self._max_size = max_size
+        self._hits = 0
+        self._misses = 0
+        self._lock = threading.Lock()
+
+    def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T:
+        if kwargs:
+            raise TypeError(f"Got unexpected keyword argument {kwargs.keys()}")
+        with self._lock:
+            if args in self._cache:
+                self._cache.move_to_end(args)
+                self._hits += 1
+                return self._cache[args]
+
+        result = self._func(*args, **kwargs)
+
+        with self._lock:
+            self._cache[args] = result
+            self._misses += 1
+            if len(self._cache) > self._max_size:
+                self._cache.popitem(last=False)
+
+        return result
+
+    def is_key_cached(self, *args: Any) -> bool:
+        with self._lock:
+            return args in self._cache
+
+    def add_key(self, result: T, *args: Any) -> None:
+        with self._lock:
+            self._cache[args] = result
+            if len(self._cache) > self._max_size:
+                self._cache.popitem(last=False)
+
+    def cache_info(self) -> UpdatableLRU.CacheInfo:
+        with self._lock:
+            return self.CacheInfo(
+                maxsize=self._max_size,
+                currsize=len(self._cache),
+                hits=self._hits,
+                misses=self._misses,
+            )
+
+
+class BackgroundBlockCache(BaseCache):
+    """
+    Cache holding memory as a set of blocks with pre-loading of
+    the next block in the background.
+
+    Requests are only ever made ``blocksize`` at a time, and are
+    stored in an LRU cache. The least recently accessed block is
+    discarded when more than ``maxblocks`` are stored. If the
+    next block is not in cache, it is loaded in a separate thread
+    in a non-blocking way.
+
+    Parameters
+    ----------
+    blocksize : int
+        The number of bytes to store in each block.
+        Requests are only ever made for ``blocksize``, so this
+        should balance the overhead of making a request against
+        the granularity of the blocks.
+    fetcher : Callable
+    size : int
+        The total size of the file being cached.
+    maxblocks : int
+        The maximum number of blocks to cache for. The maximum memory
+        use for this cache is then ``blocksize * maxblocks``.
+    """
+
+    name: ClassVar[str] = "background"
+
+    def __init__(
+        self, blocksize: int, fetcher: Fetcher, size: int, maxblocks: int = 32
+    ) -> None:
+        super().__init__(blocksize, fetcher, size)
+        self.nblocks = math.ceil(size / blocksize)
+        self.maxblocks = maxblocks
+        self._fetch_block_cached = UpdatableLRU(self._fetch_block, maxblocks)
+
+        self._thread_executor = ThreadPoolExecutor(max_workers=1)
+        self._fetch_future_block_number: int | None = None
+        self._fetch_future: Future[bytes] | None = None
+        self._fetch_future_lock = threading.Lock()
+
+    def cache_info(self) -> UpdatableLRU.CacheInfo:
+        """
+        The statistics on the block cache.
+
+        Returns
+        -------
+        NamedTuple
+            Returned directly from the LRU Cache used internally.
+        """
+        return self._fetch_block_cached.cache_info()
+
+    def __getstate__(self) -> dict[str, Any]:
+        state = self.__dict__
+        del state["_fetch_block_cached"]
+        del state["_thread_executor"]
+        del state["_fetch_future_block_number"]
+        del state["_fetch_future"]
+        del state["_fetch_future_lock"]
+        return state
+
+    def __setstate__(self, state) -> None:
+        self.__dict__.update(state)
+        self._fetch_block_cached = UpdatableLRU(self._fetch_block, state["maxblocks"])
+        self._thread_executor = ThreadPoolExecutor(max_workers=1)
+        self._fetch_future_block_number = None
+        self._fetch_future = None
+        self._fetch_future_lock = threading.Lock()
+
+    def _fetch(self, start: int | None, end: int | None) -> bytes:
+        if start is None:
+            start = 0
+        if end is None:
+            end = self.size
+        if start >= self.size or start >= end:
+            return b""
+
+        # byte position -> block numbers
+        start_block_number = start // self.blocksize
+        end_block_number = end // self.blocksize
+
+        fetch_future_block_number = None
+        fetch_future = None
+        with self._fetch_future_lock:
+            # Background thread is running. Check whether we can or must join it.
+            if self._fetch_future is not None:
+                assert self._fetch_future_block_number is not None
+                if self._fetch_future.done():
+                    logger.info("BlockCache joined background fetch without waiting.")
+                    self._fetch_block_cached.add_key(
+                        self._fetch_future.result(), self._fetch_future_block_number
+                    )
+                    # Cleanup the fetch variables. Done with fetching the block.
+                    self._fetch_future_block_number = None
+                    self._fetch_future = None
+                else:
+                    # Must join if we need the block for the current fetch
+                    must_join = bool(
+                        start_block_number
+                        <= self._fetch_future_block_number
+                        <= end_block_number
+                    )
+                    if must_join:
+                        # Copy to the local variables to release lock
+                        # before waiting for result
+                        fetch_future_block_number = self._fetch_future_block_number
+                        fetch_future = self._fetch_future
+
+                        # Cleanup the fetch variables. Have a local copy.
+                        self._fetch_future_block_number = None
+                        self._fetch_future = None
+
+        # Need to wait for the future for the current read
+        if fetch_future is not None:
+            logger.info("BlockCache waiting for background fetch.")
+            # Wait until result and put it in cache
+            self._fetch_block_cached.add_key(
+                fetch_future.result(), fetch_future_block_number
+            )
+
+        # these are cached, so safe to do multiple calls for the same start and end.
+        for block_number in range(start_block_number, end_block_number + 1):
+            self._fetch_block_cached(block_number)
+
+        # fetch next block in the background if nothing is running in the background,
+        # the block is within file and it is not already cached
+        end_block_plus_1 = end_block_number + 1
+        with self._fetch_future_lock:
+            if (
+                self._fetch_future is None
+                and end_block_plus_1 <= self.nblocks
+                and not self._fetch_block_cached.is_key_cached(end_block_plus_1)
+            ):
+                self._fetch_future_block_number = end_block_plus_1
+                self._fetch_future = self._thread_executor.submit(
+                    self._fetch_block, end_block_plus_1, "async"
+                )
+
+        return self._read_cache(
+            start,
+            end,
+            start_block_number=start_block_number,
+            end_block_number=end_block_number,
+        )
+
+    def _fetch_block(self, block_number: int, log_info: str = "sync") -> bytes:
+        """
+        Fetch the block of data for `block_number`.
+ """ + if block_number > self.nblocks: + raise ValueError( + f"'block_number={block_number}' is greater than " + f"the number of blocks ({self.nblocks})" + ) + + start = block_number * self.blocksize + end = start + self.blocksize + logger.info("BlockCache fetching block (%s) %d", log_info, block_number) + self.total_requested_bytes += end - start + self.miss_count += 1 + block_contents = super()._fetch(start, end) + return block_contents + + def _read_cache( + self, start: int, end: int, start_block_number: int, end_block_number: int + ) -> bytes: + """ + Read from our block cache. + + Parameters + ---------- + start, end : int + The start and end byte positions. + start_block_number, end_block_number : int + The start and end block numbers. + """ + start_pos = start % self.blocksize + end_pos = end % self.blocksize + + # kind of pointless to count this as a hit, but it is + self.hit_count += 1 + + if start_block_number == end_block_number: + block = self._fetch_block_cached(start_block_number) + return block[start_pos:end_pos] + + else: + # read from the initial + out = [self._fetch_block_cached(start_block_number)[start_pos:]] + + # intermediate blocks + # Note: it'd be nice to combine these into one big request. However + # that doesn't play nicely with our LRU cache. + out.extend( + map( + self._fetch_block_cached, + range(start_block_number + 1, end_block_number), + ) + ) + + # final block + out.append(self._fetch_block_cached(end_block_number)[:end_pos]) + + return b"".join(out) + + +caches: dict[str | None, type[BaseCache]] = { + # one custom case + None: BaseCache, +} + + +def register_cache(cls: type[BaseCache], clobber: bool = False) -> None: + """'Register' cache implementation. + + Parameters + ---------- + clobber: bool, optional + If set to True (default is False) - allow to overwrite existing + entry. + + Raises + ------ + ValueError + """ + name = cls.name + if not clobber and name in caches: + raise ValueError(f"Cache with name {name!r} is already known: {caches[name]}") + caches[name] = cls + + +for c in ( + BaseCache, + MMapCache, + BytesCache, + ReadAheadCache, + BlockCache, + FirstChunkCache, + AllBytes, + KnownPartsOfAFile, + BackgroundBlockCache, +): + register_cache(c) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/callbacks.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/callbacks.py new file mode 100644 index 0000000000000000000000000000000000000000..7ca99ca6ac3cd69b28bcd1550f6550e8e648c5fe --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/callbacks.py @@ -0,0 +1,324 @@ +from functools import wraps + + +class Callback: + """ + Base class and interface for callback mechanism + + This class can be used directly for monitoring file transfers by + providing ``callback=Callback(hooks=...)`` (see the ``hooks`` argument, + below), or subclassed for more specialised behaviour. + + Parameters + ---------- + size: int (optional) + Nominal quantity for the value that corresponds to a complete + transfer, e.g., total number of tiles or total number of + bytes + value: int (0) + Starting internal counter value + hooks: dict or None + A dict of named functions to be called on each update. 
The signature
+        of these must be ``f(size, value, **kwargs)``
+    """
+
+    def __init__(self, size=None, value=0, hooks=None, **kwargs):
+        self.size = size
+        self.value = value
+        self.hooks = hooks or {}
+        self.kw = kwargs
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_args):
+        self.close()
+
+    def close(self):
+        """Close callback."""
+
+    def branched(self, path_1, path_2, **kwargs):
+        """
+        Return callback for child transfers
+
+        Used when this callback is operating at a higher level, e.g., put,
+        which may trigger transfers that can also be monitored. The function
+        returns a callback that has to be passed to the child method, e.g.,
+        put_file, as the `callback=` argument.
+
+        The implementation uses `callback.branch` for compatibility.
+        When implementing callbacks, it is recommended to override this function
+        instead of `branch` and avoid calling `super().branched(...)`.
+
+        Prefer using this function over `branch`.
+
+        Parameters
+        ----------
+        path_1: str
+            Child's source path
+        path_2: str
+            Child's destination path
+        **kwargs:
+            Arbitrary keyword arguments
+
+        Returns
+        -------
+        callback: Callback
+            A callback instance to be passed to the child method
+        """
+        self.branch(path_1, path_2, kwargs)
+        # mutate kwargs so that we can force the caller to pass "callback=" explicitly
+        return kwargs.pop("callback", DEFAULT_CALLBACK)
+
+    def branch_coro(self, fn):
+        """
+        Wraps a coroutine, passing a new child callback to it.
+        """
+
+        @wraps(fn)
+        async def func(path1, path2: str, **kwargs):
+            with self.branched(path1, path2, **kwargs) as child:
+                return await fn(path1, path2, callback=child, **kwargs)
+
+        return func
+
+    def set_size(self, size):
+        """
+        Set the internal maximum size attribute
+
+        Usually called if not initially set at instantiation. Note that this
+        triggers a ``call()``.
+
+        Parameters
+        ----------
+        size: int
+        """
+        self.size = size
+        self.call()
+
+    def absolute_update(self, value):
+        """
+        Set the internal value state
+
+        Triggers ``call()``
+
+        Parameters
+        ----------
+        value: int
+        """
+        self.value = value
+        self.call()
+
+    def relative_update(self, inc=1):
+        """
+        Delta increment the internal counter
+
+        Triggers ``call()``
+
+        Parameters
+        ----------
+        inc: int
+        """
+        self.value += inc
+        self.call()
+
+    def call(self, hook_name=None, **kwargs):
+        """
+        Execute hook(s) with current state
+
+        Each function is passed the internal size and current value
+
+        Parameters
+        ----------
+        hook_name: str or None
+            If given, execute only on this hook
+        kwargs: passed on to (all) hook(s)
+        """
+        if not self.hooks:
+            return
+        kw = self.kw.copy()
+        kw.update(kwargs)
+        if hook_name:
+            if hook_name not in self.hooks:
+                return
+            return self.hooks[hook_name](self.size, self.value, **kw)
+        for hook in self.hooks.values() or []:
+            hook(self.size, self.value, **kw)
+
+    def wrap(self, iterable):
+        """
+        Wrap an iterable to call ``relative_update`` on each iteration
+
+        Parameters
+        ----------
+        iterable: Iterable
+            The iterable that is being wrapped
+        """
+        for item in iterable:
+            self.relative_update()
+            yield item
+
+    def branch(self, path_1, path_2, kwargs):
+        """
+        Set callbacks for child transfers
+
+        Used when this callback is operating at a higher level, e.g., put,
+        which may trigger transfers that can also be monitored. The passed
+        kwargs are to be *mutated* to add ``callback=``, if this class supports
+        branching to children.
+ + Parameters + ---------- + path_1: str + Child's source path + path_2: str + Child's destination path + kwargs: dict + arguments passed to child method, e.g., put_file. + + Returns + ------- + + """ + return None + + def no_op(self, *_, **__): + pass + + def __getattr__(self, item): + """ + If undefined methods are called on this class, nothing happens + """ + return self.no_op + + @classmethod + def as_callback(cls, maybe_callback=None): + """Transform callback=... into Callback instance + + For the special value of ``None``, return the global instance of + ``NoOpCallback``. This is an alternative to including + ``callback=DEFAULT_CALLBACK`` directly in a method signature. + """ + if maybe_callback is None: + return DEFAULT_CALLBACK + return maybe_callback + + +class NoOpCallback(Callback): + """ + This implementation of Callback does exactly nothing + """ + + def call(self, *args, **kwargs): + return None + + +class DotPrinterCallback(Callback): + """ + Simple example Callback implementation + + Almost identical to Callback with a hook that prints a char; here we + demonstrate how the outer layer may print "#" and the inner layer "." + """ + + def __init__(self, chr_to_print="#", **kwargs): + self.chr = chr_to_print + super().__init__(**kwargs) + + def branch(self, path_1, path_2, kwargs): + """Mutate kwargs to add new instance with different print char""" + kwargs["callback"] = DotPrinterCallback(".") + + def call(self, **kwargs): + """Just outputs a character""" + print(self.chr, end="") + + +class TqdmCallback(Callback): + """ + A callback to display a progress bar using tqdm + + Parameters + ---------- + tqdm_kwargs : dict, (optional) + Any argument accepted by the tqdm constructor. + See the `tqdm doc `_. + Will be forwarded to `tqdm_cls`. + tqdm_cls: (optional) + subclass of `tqdm.tqdm`. If not passed, it will default to `tqdm.tqdm`. + + Examples + -------- + >>> import fsspec + >>> from fsspec.callbacks import TqdmCallback + >>> fs = fsspec.filesystem("memory") + >>> path2distant_data = "/your-path" + >>> fs.upload( + ".", + path2distant_data, + recursive=True, + callback=TqdmCallback(), + ) + + You can forward args to tqdm using the ``tqdm_kwargs`` parameter. + + >>> fs.upload( + ".", + path2distant_data, + recursive=True, + callback=TqdmCallback(tqdm_kwargs={"desc": "Your tqdm description"}), + ) + + You can also customize the progress bar by passing a subclass of `tqdm`. + + .. 
code-block:: python + + class TqdmFormat(tqdm): + '''Provides a `total_time` format parameter''' + @property + def format_dict(self): + d = super().format_dict + total_time = d["elapsed"] * (d["total"] or 0) / max(d["n"], 1) + d.update(total_time=self.format_interval(total_time) + " in total") + return d + + >>> with TqdmCallback( + tqdm_kwargs={ + "desc": "desc", + "bar_format": "{total_time}: {percentage:.0f}%|{bar}{r_bar}", + }, + tqdm_cls=TqdmFormat, + ) as callback: + fs.upload(".", path2distant_data, recursive=True, callback=callback) + """ + + def __init__(self, tqdm_kwargs=None, *args, **kwargs): + try: + from tqdm import tqdm + + except ImportError as exce: + raise ImportError( + "Using TqdmCallback requires tqdm to be installed" + ) from exce + + self._tqdm_cls = kwargs.pop("tqdm_cls", tqdm) + self._tqdm_kwargs = tqdm_kwargs or {} + self.tqdm = None + super().__init__(*args, **kwargs) + + def call(self, *args, **kwargs): + if self.tqdm is None: + self.tqdm = self._tqdm_cls(total=self.size, **self._tqdm_kwargs) + self.tqdm.total = self.size + self.tqdm.update(self.value - self.tqdm.n) + + def close(self): + if self.tqdm is not None: + self.tqdm.close() + self.tqdm = None + + def __del__(self): + return self.close() + + +DEFAULT_CALLBACK = _DEFAULT_CALLBACK = NoOpCallback() diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/compression.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/compression.py new file mode 100644 index 0000000000000000000000000000000000000000..e21da562bbab49c2ad60e9d9beb546af8dadea45 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/compression.py @@ -0,0 +1,182 @@ +"""Helper functions for a standard streaming compression API""" + +from zipfile import ZipFile + +import fsspec.utils +from fsspec.spec import AbstractBufferedFile + + +def noop_file(file, mode, **kwargs): + return file + + +# TODO: files should also be available as contexts +# should be functions of the form func(infile, mode=, **kwargs) -> file-like +compr = {None: noop_file} + + +def register_compression(name, callback, extensions, force=False): + """Register an "inferable" file compression type. + + Registers transparent file compression type for use with fsspec.open. + Compression can be specified by name in open, or "infer"-ed for any files + ending with the given extensions. + + Args: + name: (str) The compression type name. Eg. "gzip". + callback: A callable of form (infile, mode, **kwargs) -> file-like. + Accepts an input file-like object, the target mode and kwargs. + Returns a wrapped file-like object. + extensions: (str, Iterable[str]) A file extension, or list of file + extensions for which to infer this compression scheme. Eg. "gz". + force: (bool) Force re-registration of compression type or extensions. + + Raises: + ValueError: If name or extensions already registered, and not force. 
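+
+    Example:
+        A minimal sketch (the "mycodec" name and "mc" extension are
+        hypothetical), registering a pass-through codec:
+
+            register_compression("mycodec", lambda f, mode="rb", **kw: f, "mc")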
+ + """ + if isinstance(extensions, str): + extensions = [extensions] + + # Validate registration + if name in compr and not force: + raise ValueError(f"Duplicate compression registration: {name}") + + for ext in extensions: + if ext in fsspec.utils.compressions and not force: + raise ValueError(f"Duplicate compression file extension: {ext} ({name})") + + compr[name] = callback + + for ext in extensions: + fsspec.utils.compressions[ext] = name + + +def unzip(infile, mode="rb", filename=None, **kwargs): + if "r" not in mode: + filename = filename or "file" + z = ZipFile(infile, mode="w", **kwargs) + fo = z.open(filename, mode="w") + fo.close = lambda closer=fo.close: closer() or z.close() + return fo + z = ZipFile(infile) + if filename is None: + filename = z.namelist()[0] + return z.open(filename, mode="r", **kwargs) + + +register_compression("zip", unzip, "zip") + +try: + from bz2 import BZ2File +except ImportError: + pass +else: + register_compression("bz2", BZ2File, "bz2") + +try: # pragma: no cover + from isal import igzip + + def isal(infile, mode="rb", **kwargs): + return igzip.IGzipFile(fileobj=infile, mode=mode, **kwargs) + + register_compression("gzip", isal, "gz") +except ImportError: + from gzip import GzipFile + + register_compression( + "gzip", lambda f, **kwargs: GzipFile(fileobj=f, **kwargs), "gz" + ) + +try: + from lzma import LZMAFile + + register_compression("lzma", LZMAFile, "lzma") + register_compression("xz", LZMAFile, "xz") +except ImportError: + pass + +try: + import lzmaffi + + register_compression("lzma", lzmaffi.LZMAFile, "lzma", force=True) + register_compression("xz", lzmaffi.LZMAFile, "xz", force=True) +except ImportError: + pass + + +class SnappyFile(AbstractBufferedFile): + def __init__(self, infile, mode, **kwargs): + import snappy + + super().__init__( + fs=None, path="snappy", mode=mode.strip("b") + "b", size=999999999, **kwargs + ) + self.infile = infile + if "r" in mode: + self.codec = snappy.StreamDecompressor() + else: + self.codec = snappy.StreamCompressor() + + def _upload_chunk(self, final=False): + self.buffer.seek(0) + out = self.codec.add_chunk(self.buffer.read()) + self.infile.write(out) + return True + + def seek(self, loc, whence=0): + raise NotImplementedError("SnappyFile is not seekable") + + def seekable(self): + return False + + def _fetch_range(self, start, end): + """Get the specified set of bytes from remote""" + data = self.infile.read(end - start) + return self.codec.decompress(data) + + +try: + import snappy + + snappy.compress(b"") + # Snappy may use the .sz file extension, but this is not part of the + # standard implementation. 
+ register_compression("snappy", SnappyFile, []) + +except (ImportError, NameError, AttributeError): + pass + +try: + import lz4.frame + + register_compression("lz4", lz4.frame.open, "lz4") +except ImportError: + pass + +try: + # zstd in the standard library for python >= 3.14 + from compression.zstd import ZstdFile + + register_compression("zstd", ZstdFile, "zst") + +except ImportError: + try: + import zstandard as zstd + + def zstandard_file(infile, mode="rb"): + if "r" in mode: + cctx = zstd.ZstdDecompressor() + return cctx.stream_reader(infile) + else: + cctx = zstd.ZstdCompressor(level=10) + return cctx.stream_writer(infile) + + register_compression("zstd", zstandard_file, "zst") + except ImportError: + pass + + +def available_compressions(): + """Return a list of the implemented compressions.""" + return list(compr) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/config.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/config.py new file mode 100644 index 0000000000000000000000000000000000000000..76d9af14aaf7df47c4551c169f27b05abf9c269e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/config.py @@ -0,0 +1,131 @@ +from __future__ import annotations + +import configparser +import json +import os +import warnings +from typing import Any + +conf: dict[str, dict[str, Any]] = {} +default_conf_dir = os.path.join(os.path.expanduser("~"), ".config/fsspec") +conf_dir = os.environ.get("FSSPEC_CONFIG_DIR", default_conf_dir) + + +def set_conf_env(conf_dict, envdict=os.environ): + """Set config values from environment variables + + Looks for variables of the form ``FSSPEC_`` and + ``FSSPEC__``. For ``FSSPEC_`` the value is parsed + as a json dictionary and used to ``update`` the config of the + corresponding protocol. For ``FSSPEC__`` there is no + attempt to convert the string value, but the kwarg keys will be lower-cased. + + The ``FSSPEC__`` variables are applied after the + ``FSSPEC_`` ones. + + Parameters + ---------- + conf_dict : dict(str, dict) + This dict will be mutated + envdict : dict-like(str, str) + Source for the values - usually the real environment + """ + kwarg_keys = [] + for key in envdict: + if key.startswith("FSSPEC_") and len(key) > 7 and key[7] != "_": + if key.count("_") > 1: + kwarg_keys.append(key) + continue + try: + value = json.loads(envdict[key]) + except json.decoder.JSONDecodeError as ex: + warnings.warn( + f"Ignoring environment variable {key} due to a parse failure: {ex}" + ) + else: + if isinstance(value, dict): + _, proto = key.split("_", 1) + conf_dict.setdefault(proto.lower(), {}).update(value) + else: + warnings.warn( + f"Ignoring environment variable {key} due to not being a dict:" + f" {type(value)}" + ) + elif key.startswith("FSSPEC"): + warnings.warn( + f"Ignoring environment variable {key} due to having an unexpected name" + ) + + for key in kwarg_keys: + _, proto, kwarg = key.split("_", 2) + conf_dict.setdefault(proto.lower(), {})[kwarg.lower()] = envdict[key] + + +def set_conf_files(cdir, conf_dict): + """Set config values from files + + Scans for INI and JSON files in the given dictionary, and uses their + contents to set the config. In case of repeated values, later values + win. + + In the case of INI files, all values are strings, and these will not + be converted. 
+ + Parameters + ---------- + cdir : str + Directory to search + conf_dict : dict(str, dict) + This dict will be mutated + """ + if not os.path.isdir(cdir): + return + allfiles = sorted(os.listdir(cdir)) + for fn in allfiles: + if fn.endswith(".ini"): + ini = configparser.ConfigParser() + ini.read(os.path.join(cdir, fn)) + for key in ini: + if key == "DEFAULT": + continue + conf_dict.setdefault(key, {}).update(dict(ini[key])) + if fn.endswith(".json"): + with open(os.path.join(cdir, fn)) as f: + js = json.load(f) + for key in js: + conf_dict.setdefault(key, {}).update(dict(js[key])) + + +def apply_config(cls, kwargs, conf_dict=None): + """Supply default values for kwargs when instantiating class + + Augments the passed kwargs, by finding entries in the config dict + which match the classes ``.protocol`` attribute (one or more str) + + Parameters + ---------- + cls : file system implementation + kwargs : dict + conf_dict : dict of dict + Typically this is the global configuration + + Returns + ------- + dict : the modified set of kwargs + """ + if conf_dict is None: + conf_dict = conf + protos = cls.protocol if isinstance(cls.protocol, (tuple, list)) else [cls.protocol] + kw = {} + for proto in protos: + # default kwargs from the current state of the config + if proto in conf_dict: + kw.update(conf_dict[proto]) + # explicit kwargs always win + kw.update(**kwargs) + kwargs = kw + return kwargs + + +set_conf_files(conf_dir, conf) +set_conf_env(conf) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/conftest.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..6874a42c4895c3c7b973dc5d63fd4488a4e60b44 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/conftest.py @@ -0,0 +1,55 @@ +import os +import shutil +import subprocess +import sys +import time + +import pytest + +import fsspec +from fsspec.implementations.cached import CachingFileSystem + + +@pytest.fixture() +def m(): + """ + Fixture providing a memory filesystem. + """ + m = fsspec.filesystem("memory") + m.store.clear() + m.pseudo_dirs.clear() + m.pseudo_dirs.append("") + try: + yield m + finally: + m.store.clear() + m.pseudo_dirs.clear() + m.pseudo_dirs.append("") + + +@pytest.fixture +def ftp_writable(tmpdir): + """ + Fixture providing a writable FTP filesystem. 
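+
+    Yields the connection details ``(host, port, username, password)`` of a
+    temporary local pyftpdlib server.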
+ """ + pytest.importorskip("pyftpdlib") + from fsspec.implementations.ftp import FTPFileSystem + + FTPFileSystem.clear_instance_cache() # remove lingering connections + CachingFileSystem.clear_instance_cache() + d = str(tmpdir) + with open(os.path.join(d, "out"), "wb") as f: + f.write(b"hello" * 10000) + P = subprocess.Popen( + [sys.executable, "-m", "pyftpdlib", "-d", d, "-u", "user", "-P", "pass", "-w"] + ) + try: + time.sleep(1) + yield "localhost", 2121, "user", "pass" + finally: + P.terminate() + P.wait() + try: + shutil.rmtree(tmpdir) + except Exception: + pass diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/core.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/core.py new file mode 100644 index 0000000000000000000000000000000000000000..d8e75572bc0e31b5a12faee73275760e421aadb0 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/core.py @@ -0,0 +1,743 @@ +from __future__ import annotations + +import io +import logging +import os +import re +from glob import has_magic +from pathlib import Path + +# for backwards compat, we export cache things from here too +from fsspec.caching import ( # noqa: F401 + BaseCache, + BlockCache, + BytesCache, + MMapCache, + ReadAheadCache, + caches, +) +from fsspec.compression import compr +from fsspec.config import conf +from fsspec.registry import filesystem, get_filesystem_class +from fsspec.utils import ( + _unstrip_protocol, + build_name_function, + infer_compression, + stringify_path, +) + +logger = logging.getLogger("fsspec") + + +class OpenFile: + """ + File-like object to be used in a context + + Can layer (buffered) text-mode and compression over any file-system, which + are typically binary-only. + + These instances are safe to serialize, as the low-level file object + is not created until invoked using ``with``. + + Parameters + ---------- + fs: FileSystem + The file system to use for opening the file. Should be a subclass or duck-type + with ``fsspec.spec.AbstractFileSystem`` + path: str + Location to open + mode: str like 'rb', optional + Mode of the opened file + compression: str or None, optional + Compression to apply + encoding: str or None, optional + The encoding to use if opened in text mode. + errors: str or None, optional + How to handle encoding errors if opened in text mode. + newline: None or str + Passed to TextIOWrapper in text mode, how to handle line endings. + autoopen: bool + If True, calls open() immediately. Mostly used by pickle + pos: int + If given and autoopen is True, seek to this location immediately + """ + + def __init__( + self, + fs, + path, + mode="rb", + compression=None, + encoding=None, + errors=None, + newline=None, + ): + self.fs = fs + self.path = path + self.mode = mode + self.compression = get_compression(path, compression) + self.encoding = encoding + self.errors = errors + self.newline = newline + self.fobjects = [] + + def __reduce__(self): + return ( + OpenFile, + ( + self.fs, + self.path, + self.mode, + self.compression, + self.encoding, + self.errors, + self.newline, + ), + ) + + def __repr__(self): + return f"" + + def __enter__(self): + mode = self.mode.replace("t", "").replace("b", "") + "b" + + try: + f = self.fs.open(self.path, mode=mode) + except FileNotFoundError as e: + if has_magic(self.path): + raise FileNotFoundError( + "%s not found. 
The URL contains glob characters: you maybe needed\n" + "to pass expand=True in fsspec.open() or the storage_options of \n" + "your library. You can also set the config value 'open_expand'\n" + "before import, or fsspec.core.DEFAULT_EXPAND at runtime, to True.", + self.path, + ) from e + raise + + self.fobjects = [f] + + if self.compression is not None: + compress = compr[self.compression] + f = compress(f, mode=mode[0]) + self.fobjects.append(f) + + if "b" not in self.mode: + # assume, for example, that 'r' is equivalent to 'rt' as in builtin + f = PickleableTextIOWrapper( + f, encoding=self.encoding, errors=self.errors, newline=self.newline + ) + self.fobjects.append(f) + + return self.fobjects[-1] + + def __exit__(self, *args): + self.close() + + @property + def full_name(self): + return _unstrip_protocol(self.path, self.fs) + + def open(self): + """Materialise this as a real open file without context + + The OpenFile object should be explicitly closed to avoid enclosed file + instances persisting. You must, therefore, keep a reference to the OpenFile + during the life of the file-like it generates. + """ + return self.__enter__() + + def close(self): + """Close all encapsulated file objects""" + for f in reversed(self.fobjects): + if "r" not in self.mode and not f.closed: + f.flush() + f.close() + self.fobjects.clear() + + +class OpenFiles(list): + """List of OpenFile instances + + Can be used in a single context, which opens and closes all of the + contained files. Normal list access to get the elements works as + normal. + + A special case is made for caching filesystems - the files will + be down/uploaded together at the start or end of the context, and + this may happen concurrently, if the target filesystem supports it. + """ + + def __init__(self, *args, mode="rb", fs=None): + self.mode = mode + self.fs = fs + self.files = [] + super().__init__(*args) + + def __enter__(self): + if self.fs is None: + raise ValueError("Context has already been used") + + fs = self.fs + while True: + if hasattr(fs, "open_many"): + # check for concurrent cache download; or set up for upload + self.files = fs.open_many(self) + return self.files + if hasattr(fs, "fs") and fs.fs is not None: + fs = fs.fs + else: + break + return [s.__enter__() for s in self] + + def __exit__(self, *args): + fs = self.fs + [s.__exit__(*args) for s in self] + if "r" not in self.mode: + while True: + if hasattr(fs, "open_many"): + # check for concurrent cache upload + fs.commit_many(self.files) + return + if hasattr(fs, "fs") and fs.fs is not None: + fs = fs.fs + else: + break + + def __getitem__(self, item): + out = super().__getitem__(item) + if isinstance(item, slice): + return OpenFiles(out, mode=self.mode, fs=self.fs) + return out + + def __repr__(self): + return f"" + + +def open_files( + urlpath, + mode="rb", + compression=None, + encoding="utf8", + errors=None, + name_function=None, + num=1, + protocol=None, + newline=None, + auto_mkdir=True, + expand=True, + **kwargs, +): + """Given a path or paths, return a list of ``OpenFile`` objects. + + For writing, a str path must contain the "*" character, which will be filled + in by increasing numbers, e.g., "part*" -> "part1", "part2" if num=2. + + For either reading or writing, can instead provide explicit list of paths. + + Parameters + ---------- + urlpath: string or list + Absolute or relative filepath(s). Prefix with a protocol like ``s3://`` + to read from alternative filesystems. 
To read from multiple files you + can pass a globstring or a list of paths, with the caveat that they + must all have the same protocol. + mode: 'rb', 'wt', etc. + compression: string or None + If given, open file using compression codec. Can either be a compression + name (a key in ``fsspec.compression.compr``) or "infer" to guess the + compression from the filename suffix. + encoding: str + For text mode only + errors: None or str + Passed to TextIOWrapper in text mode + name_function: function or None + if opening a set of files for writing, those files do not yet exist, + so we need to generate their names by formatting the urlpath for + each sequence number + num: int [1] + if writing mode, number of files we expect to create (passed to + name+function) + protocol: str or None + If given, overrides the protocol found in the URL. + newline: bytes or None + Used for line terminator in text mode. If None, uses system default; + if blank, uses no translation. + auto_mkdir: bool (True) + If in write mode, this will ensure the target directory exists before + writing, by calling ``fs.mkdirs(exist_ok=True)``. + expand: bool + **kwargs: dict + Extra options that make sense to a particular storage connection, e.g. + host, port, username, password, etc. + + Examples + -------- + >>> files = open_files('2015-*-*.csv') # doctest: +SKIP + >>> files = open_files( + ... 's3://bucket/2015-*-*.csv.gz', compression='gzip' + ... ) # doctest: +SKIP + + Returns + ------- + An ``OpenFiles`` instance, which is a list of ``OpenFile`` objects that can + be used as a single context + + Notes + ----- + For a full list of the available protocols and the implementations that + they map across to see the latest online documentation: + + - For implementations built into ``fsspec`` see + https://filesystem-spec.readthedocs.io/en/latest/api.html#built-in-implementations + - For implementations in separate packages see + https://filesystem-spec.readthedocs.io/en/latest/api.html#other-known-implementations + """ + fs, fs_token, paths = get_fs_token_paths( + urlpath, + mode, + num=num, + name_function=name_function, + storage_options=kwargs, + protocol=protocol, + expand=expand, + ) + if fs.protocol == "file": + fs.auto_mkdir = auto_mkdir + elif "r" not in mode and auto_mkdir: + parents = {fs._parent(path) for path in paths} + for parent in parents: + try: + fs.makedirs(parent, exist_ok=True) + except PermissionError: + pass + return OpenFiles( + [ + OpenFile( + fs, + path, + mode=mode, + compression=compression, + encoding=encoding, + errors=errors, + newline=newline, + ) + for path in paths + ], + mode=mode, + fs=fs, + ) + + +def _un_chain(path, kwargs): + # Avoid a circular import + from fsspec.implementations.cached import CachingFileSystem + + if "::" in path: + x = re.compile(".*[^a-z]+.*") # test for non protocol-like single word + bits = [] + for p in path.split("::"): + if "://" in p or x.match(p): + bits.append(p) + else: + bits.append(p + "://") + else: + bits = [path] + # [[url, protocol, kwargs], ...] 
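+    # For example (illustrative): "simplecache::s3://bucket/key" splits into
+    # bits ["simplecache://", "s3://bucket/key"], which unchain to one entry
+    # per protocol ("simplecache", then "s3"), outermost first.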
+ out = [] + previous_bit = None + kwargs = kwargs.copy() + for bit in reversed(bits): + protocol = kwargs.pop("protocol", None) or split_protocol(bit)[0] or "file" + cls = get_filesystem_class(protocol) + extra_kwargs = cls._get_kwargs_from_urls(bit) + kws = kwargs.pop(protocol, {}) + if bit is bits[0]: + kws.update(kwargs) + kw = dict( + **{k: v for k, v in extra_kwargs.items() if k not in kws or v != kws[k]}, + **kws, + ) + bit = cls._strip_protocol(bit) + if "target_protocol" not in kw and issubclass(cls, CachingFileSystem): + bit = previous_bit + out.append((bit, protocol, kw)) + previous_bit = bit + out.reverse() + return out + + +def url_to_fs(url, **kwargs): + """ + Turn fully-qualified and potentially chained URL into filesystem instance + + Parameters + ---------- + url : str + The fsspec-compatible URL + **kwargs: dict + Extra options that make sense to a particular storage connection, e.g. + host, port, username, password, etc. + + Returns + ------- + filesystem : FileSystem + The new filesystem discovered from ``url`` and created with + ``**kwargs``. + urlpath : str + The file-systems-specific URL for ``url``. + """ + url = stringify_path(url) + # non-FS arguments that appear in fsspec.open() + # inspect could keep this in sync with open()'s signature + known_kwargs = { + "compression", + "encoding", + "errors", + "expand", + "mode", + "name_function", + "newline", + "num", + } + kwargs = {k: v for k, v in kwargs.items() if k not in known_kwargs} + chain = _un_chain(url, kwargs) + inkwargs = {} + # Reverse iterate the chain, creating a nested target_* structure + for i, ch in enumerate(reversed(chain)): + urls, protocol, kw = ch + if i == len(chain) - 1: + inkwargs = dict(**kw, **inkwargs) + continue + inkwargs["target_options"] = dict(**kw, **inkwargs) + inkwargs["target_protocol"] = protocol + inkwargs["fo"] = urls + urlpath, protocol, _ = chain[0] + fs = filesystem(protocol, **inkwargs) + return fs, urlpath + + +DEFAULT_EXPAND = conf.get("open_expand", False) + + +def open( + urlpath, + mode="rb", + compression=None, + encoding="utf8", + errors=None, + protocol=None, + newline=None, + expand=None, + **kwargs, +): + """Given a path or paths, return one ``OpenFile`` object. + + Parameters + ---------- + urlpath: string or list + Absolute or relative filepath. Prefix with a protocol like ``s3://`` + to read from alternative filesystems. Should not include glob + character(s). + mode: 'rb', 'wt', etc. + compression: string or None + If given, open file using compression codec. Can either be a compression + name (a key in ``fsspec.compression.compr``) or "infer" to guess the + compression from the filename suffix. + encoding: str + For text mode only + errors: None or str + Passed to TextIOWrapper in text mode + protocol: str or None + If given, overrides the protocol found in the URL. + newline: bytes or None + Used for line terminator in text mode. If None, uses system default; + if blank, uses no translation. + expand: bool or None + Whether to regard file paths containing special glob characters as needing + expansion (finding the first match) or absolute. Setting False allows using + paths which do embed such characters. If None (default), this argument + takes its value from the DEFAULT_EXPAND module variable, which takes + its initial value from the "open_expand" config value at startup, which will + be False if not set. + **kwargs: dict + Extra options that make sense to a particular storage connection, e.g. + host, port, username, password, etc. 
+ + Examples + -------- + >>> openfile = open('2015-01-01.csv') # doctest: +SKIP + >>> openfile = open( + ... 's3://bucket/2015-01-01.csv.gz', compression='gzip' + ... ) # doctest: +SKIP + >>> with openfile as f: + ... df = pd.read_csv(f) # doctest: +SKIP + ... + + Returns + ------- + ``OpenFile`` object. + + Notes + ----- + For a full list of the available protocols and the implementations that + they map across to see the latest online documentation: + + - For implementations built into ``fsspec`` see + https://filesystem-spec.readthedocs.io/en/latest/api.html#built-in-implementations + - For implementations in separate packages see + https://filesystem-spec.readthedocs.io/en/latest/api.html#other-known-implementations + """ + expand = DEFAULT_EXPAND if expand is None else expand + out = open_files( + urlpath=[urlpath], + mode=mode, + compression=compression, + encoding=encoding, + errors=errors, + protocol=protocol, + newline=newline, + expand=expand, + **kwargs, + ) + if not out: + raise FileNotFoundError(urlpath) + return out[0] + + +def open_local( + url: str | list[str] | Path | list[Path], + mode: str = "rb", + **storage_options: dict, +) -> str | list[str]: + """Open file(s) which can be resolved to local + + For files which either are local, or get downloaded upon open + (e.g., by file caching) + + Parameters + ---------- + url: str or list(str) + mode: str + Must be read mode + storage_options: + passed on to FS for or used by open_files (e.g., compression) + """ + if "r" not in mode: + raise ValueError("Can only ensure local files when reading") + of = open_files(url, mode=mode, **storage_options) + if not getattr(of[0].fs, "local_file", False): + raise ValueError( + "open_local can only be used on a filesystem which" + " has attribute local_file=True" + ) + with of as files: + paths = [f.name for f in files] + if (isinstance(url, str) and not has_magic(url)) or isinstance(url, Path): + return paths[0] + return paths + + +def get_compression(urlpath, compression): + if compression == "infer": + compression = infer_compression(urlpath) + if compression is not None and compression not in compr: + raise ValueError(f"Compression type {compression} not supported") + return compression + + +def split_protocol(urlpath): + """Return protocol, path pair""" + urlpath = stringify_path(urlpath) + if "://" in urlpath: + protocol, path = urlpath.split("://", 1) + if len(protocol) > 1: + # excludes Windows paths + return protocol, path + if urlpath.startswith("data:"): + return urlpath.split(":", 1) + return None, urlpath + + +def strip_protocol(urlpath): + """Return only path part of full URL, according to appropriate backend""" + protocol, _ = split_protocol(urlpath) + cls = get_filesystem_class(protocol) + return cls._strip_protocol(urlpath) + + +def expand_paths_if_needed(paths, mode, num, fs, name_function): + """Expand paths if they have a ``*`` in them (write mode) or any of ``*?[]`` + in them (read mode). + + :param paths: list of paths + mode: str + Mode in which to open files. + num: int + If opening in writing mode, number of files we expect to create. + fs: filesystem object + name_function: callable + If opening in writing mode, this callable is used to generate path + names. Names are generated for each partition by + ``urlpath.replace('*', name_function(partition_index))``. 
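+
+    For example (illustrative), with ``num=2`` and the default
+    ``name_function``, the write path ``"out-*.csv"`` expands to
+    ``["out-0.csv", "out-1.csv"]``.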
+ :return: list of paths + """ + expanded_paths = [] + paths = list(paths) + + if "w" in mode: # read mode + if sum(1 for p in paths if "*" in p) > 1: + raise ValueError( + "When writing data, only one filename mask can be specified." + ) + num = max(num, len(paths)) + + for curr_path in paths: + if "*" in curr_path: + # expand using name_function + expanded_paths.extend(_expand_paths(curr_path, name_function, num)) + else: + expanded_paths.append(curr_path) + # if we generated more paths that asked for, trim the list + if len(expanded_paths) > num: + expanded_paths = expanded_paths[:num] + + else: # read mode + for curr_path in paths: + if has_magic(curr_path): + # expand using glob + expanded_paths.extend(fs.glob(curr_path)) + else: + expanded_paths.append(curr_path) + + return expanded_paths + + +def get_fs_token_paths( + urlpath, + mode="rb", + num=1, + name_function=None, + storage_options=None, + protocol=None, + expand=True, +): + """Filesystem, deterministic token, and paths from a urlpath and options. + + Parameters + ---------- + urlpath: string or iterable + Absolute or relative filepath, URL (may include protocols like + ``s3://``), or globstring pointing to data. + mode: str, optional + Mode in which to open files. + num: int, optional + If opening in writing mode, number of files we expect to create. + name_function: callable, optional + If opening in writing mode, this callable is used to generate path + names. Names are generated for each partition by + ``urlpath.replace('*', name_function(partition_index))``. + storage_options: dict, optional + Additional keywords to pass to the filesystem class. + protocol: str or None + To override the protocol specifier in the URL + expand: bool + Expand string paths for writing, assuming the path is a directory + """ + if isinstance(urlpath, (list, tuple, set)): + if not urlpath: + raise ValueError("empty urlpath sequence") + urlpath0 = stringify_path(next(iter(urlpath))) + else: + urlpath0 = stringify_path(urlpath) + storage_options = storage_options or {} + if protocol: + storage_options["protocol"] = protocol + chain = _un_chain(urlpath0, storage_options or {}) + inkwargs = {} + # Reverse iterate the chain, creating a nested target_* structure + for i, ch in enumerate(reversed(chain)): + urls, nested_protocol, kw = ch + if i == len(chain) - 1: + inkwargs = dict(**kw, **inkwargs) + continue + inkwargs["target_options"] = dict(**kw, **inkwargs) + inkwargs["target_protocol"] = nested_protocol + inkwargs["fo"] = urls + paths, protocol, _ = chain[0] + fs = filesystem(protocol, **inkwargs) + if isinstance(urlpath, (list, tuple, set)): + pchains = [ + _un_chain(stringify_path(u), storage_options or {})[0] for u in urlpath + ] + if len({pc[1] for pc in pchains}) > 1: + raise ValueError("Protocol mismatch getting fs from %s", urlpath) + paths = [pc[0] for pc in pchains] + else: + paths = fs._strip_protocol(paths) + if isinstance(paths, (list, tuple, set)): + if expand: + paths = expand_paths_if_needed(paths, mode, num, fs, name_function) + elif not isinstance(paths, list): + paths = list(paths) + else: + if ("w" in mode or "x" in mode) and expand: + paths = _expand_paths(paths, name_function, num) + elif "*" in paths: + paths = [f for f in sorted(fs.glob(paths)) if not fs.isdir(f)] + else: + paths = [paths] + + return fs, fs._fs_token, paths + + +def _expand_paths(path, name_function, num): + if isinstance(path, str): + if path.count("*") > 1: + raise ValueError("Output path spec must contain exactly one '*'.") + elif "*" not in path: + path 
= os.path.join(path, "*.part") + + if name_function is None: + name_function = build_name_function(num - 1) + + paths = [path.replace("*", name_function(i)) for i in range(num)] + if paths != sorted(paths): + logger.warning( + "In order to preserve order between partitions" + " paths created with ``name_function`` should " + "sort to partition order" + ) + elif isinstance(path, (tuple, list)): + assert len(path) == num + paths = list(path) + else: + raise ValueError( + "Path should be either\n" + "1. A list of paths: ['foo.json', 'bar.json', ...]\n" + "2. A directory: 'foo/\n" + "3. A path with a '*' in it: 'foo.*.json'" + ) + return paths + + +class PickleableTextIOWrapper(io.TextIOWrapper): + """TextIOWrapper cannot be pickled. This solves it. + + Requires that ``buffer`` be pickleable, which all instances of + AbstractBufferedFile are. + """ + + def __init__( + self, + buffer, + encoding=None, + errors=None, + newline=None, + line_buffering=False, + write_through=False, + ): + self.args = buffer, encoding, errors, newline, line_buffering, write_through + super().__init__(*self.args) + + def __reduce__(self): + return PickleableTextIOWrapper, self.args diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/dircache.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/dircache.py new file mode 100644 index 0000000000000000000000000000000000000000..eca19566b135e5a7a4f6e7407d56411ec58bfe44 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/dircache.py @@ -0,0 +1,98 @@ +import time +from collections.abc import MutableMapping +from functools import lru_cache + + +class DirCache(MutableMapping): + """ + Caching of directory listings, in a structure like:: + + {"path0": [ + {"name": "path0/file0", + "size": 123, + "type": "file", + ... + }, + {"name": "path0/file1", + }, + ... + ], + "path1": [...] + } + + Parameters to this class control listing expiry or indeed turn + caching off + """ + + def __init__( + self, + use_listings_cache=True, + listings_expiry_time=None, + max_paths=None, + **kwargs, + ): + """ + + Parameters + ---------- + use_listings_cache: bool + If False, this cache never returns items, but always reports KeyError, + and setting items has no effect + listings_expiry_time: int or float (optional) + Time in seconds that a listing is considered valid. If None, + listings do not expire. + max_paths: int (optional) + The number of most recent listings that are considered valid; 'recent' + refers to when the entry was set. 
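+
+        For example (illustrative), ``DirCache(listings_expiry_time=10)``
+        returns a cached listing for ten seconds; after that ``__getitem__``
+        deletes the entry and raises ``KeyError`` again.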
+ """ + self._cache = {} + self._times = {} + if max_paths: + self._q = lru_cache(max_paths + 1)(lambda key: self._cache.pop(key, None)) + self.use_listings_cache = use_listings_cache + self.listings_expiry_time = listings_expiry_time + self.max_paths = max_paths + + def __getitem__(self, item): + if self.listings_expiry_time is not None: + if self._times.get(item, 0) - time.time() < -self.listings_expiry_time: + del self._cache[item] + if self.max_paths: + self._q(item) + return self._cache[item] # maybe raises KeyError + + def clear(self): + self._cache.clear() + + def __len__(self): + return len(self._cache) + + def __contains__(self, item): + try: + self[item] + return True + except KeyError: + return False + + def __setitem__(self, key, value): + if not self.use_listings_cache: + return + if self.max_paths: + self._q(key) + self._cache[key] = value + if self.listings_expiry_time is not None: + self._times[key] = time.time() + + def __delitem__(self, key): + del self._cache[key] + + def __iter__(self): + entries = list(self._cache) + + return (k for k in entries if k in self) + + def __reduce__(self): + return ( + DirCache, + (self.use_listings_cache, self.listings_expiry_time, self.max_paths), + ) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/exceptions.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..ae8905475f02655f4fc5863931d99ca9da55db78 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/exceptions.py @@ -0,0 +1,18 @@ +""" +fsspec user-defined exception classes +""" + +import asyncio + + +class BlocksizeMismatchError(ValueError): + """ + Raised when a cached file is opened with a different blocksize than it was + written with + """ + + +class FSTimeoutError(asyncio.TimeoutError): + """ + Raised when a fsspec function timed out occurs + """ diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/fuse.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/fuse.py new file mode 100644 index 0000000000000000000000000000000000000000..566d520fce3e94e3bbaee48c3c6acc9f1db315a8 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/fuse.py @@ -0,0 +1,324 @@ +import argparse +import logging +import os +import stat +import threading +import time +from errno import EIO, ENOENT + +from fuse import FUSE, FuseOSError, LoggingMixIn, Operations + +from fsspec import __version__ +from fsspec.core import url_to_fs + +logger = logging.getLogger("fsspec.fuse") + + +class FUSEr(Operations): + def __init__(self, fs, path, ready_file=False): + self.fs = fs + self.cache = {} + self.root = path.rstrip("/") + "/" + self.counter = 0 + logger.info("Starting FUSE at %s", path) + self._ready_file = ready_file + + def getattr(self, path, fh=None): + logger.debug("getattr %s", path) + if self._ready_file and path in ["/.fuse_ready", ".fuse_ready"]: + return {"type": "file", "st_size": 5} + + path = "".join([self.root, path.lstrip("/")]).rstrip("/") + try: + info = self.fs.info(path) + except FileNotFoundError as exc: + raise FuseOSError(ENOENT) from exc + + data = {"st_uid": info.get("uid", 1000), "st_gid": info.get("gid", 1000)} + perm = info.get("mode", 0o777) + + if info["type"] != "file": + data["st_mode"] = stat.S_IFDIR | perm + data["st_size"] = 0 + 
data["st_blksize"] = 0 + else: + data["st_mode"] = stat.S_IFREG | perm + data["st_size"] = info["size"] + data["st_blksize"] = 5 * 2**20 + data["st_nlink"] = 1 + data["st_atime"] = info["atime"] if "atime" in info else time.time() + data["st_ctime"] = info["ctime"] if "ctime" in info else time.time() + data["st_mtime"] = info["mtime"] if "mtime" in info else time.time() + return data + + def readdir(self, path, fh): + logger.debug("readdir %s", path) + path = "".join([self.root, path.lstrip("/")]) + files = self.fs.ls(path, False) + files = [os.path.basename(f.rstrip("/")) for f in files] + return [".", ".."] + files + + def mkdir(self, path, mode): + path = "".join([self.root, path.lstrip("/")]) + self.fs.mkdir(path) + return 0 + + def rmdir(self, path): + path = "".join([self.root, path.lstrip("/")]) + self.fs.rmdir(path) + return 0 + + def read(self, path, size, offset, fh): + logger.debug("read %s", (path, size, offset)) + if self._ready_file and path in ["/.fuse_ready", ".fuse_ready"]: + # status indicator + return b"ready" + + f = self.cache[fh] + f.seek(offset) + out = f.read(size) + return out + + def write(self, path, data, offset, fh): + logger.debug("write %s", (path, offset)) + f = self.cache[fh] + f.seek(offset) + f.write(data) + return len(data) + + def create(self, path, flags, fi=None): + logger.debug("create %s", (path, flags)) + fn = "".join([self.root, path.lstrip("/")]) + self.fs.touch(fn) # OS will want to get attributes immediately + f = self.fs.open(fn, "wb") + self.cache[self.counter] = f + self.counter += 1 + return self.counter - 1 + + def open(self, path, flags): + logger.debug("open %s", (path, flags)) + fn = "".join([self.root, path.lstrip("/")]) + if flags % 2 == 0: + # read + mode = "rb" + else: + # write/create + mode = "wb" + self.cache[self.counter] = self.fs.open(fn, mode) + self.counter += 1 + return self.counter - 1 + + def truncate(self, path, length, fh=None): + fn = "".join([self.root, path.lstrip("/")]) + if length != 0: + raise NotImplementedError + # maybe should be no-op since open with write sets size to zero anyway + self.fs.touch(fn) + + def unlink(self, path): + fn = "".join([self.root, path.lstrip("/")]) + try: + self.fs.rm(fn, False) + except (OSError, FileNotFoundError) as exc: + raise FuseOSError(EIO) from exc + + def release(self, path, fh): + try: + if fh in self.cache: + f = self.cache[fh] + f.close() + self.cache.pop(fh) + except Exception as e: + print(e) + return 0 + + def chmod(self, path, mode): + if hasattr(self.fs, "chmod"): + path = "".join([self.root, path.lstrip("/")]) + return self.fs.chmod(path, mode) + raise NotImplementedError + + +def run( + fs, + path, + mount_point, + foreground=True, + threads=False, + ready_file=False, + ops_class=FUSEr, +): + """Mount stuff in a local directory + + This uses fusepy to make it appear as if a given path on an fsspec + instance is in fact resident within the local file-system. + + This requires that fusepy by installed, and that FUSE be available on + the system (typically requiring a package to be installed with + apt, yum, brew, etc.). + + Parameters + ---------- + fs: file-system instance + From one of the compatible implementations + path: str + Location on that file-system to regard as the root directory to + mount. Note that you typically should include the terminating "/" + character. + mount_point: str + An empty directory on the local file-system where the contents of + the remote path will appear. + foreground: bool + Whether or not calling this function will block. 
Operation will + typically be more stable if True. + threads: bool + Whether or not to create threads when responding to file operations + within the mounter directory. Operation will typically be more + stable if False. + ready_file: bool + Whether the FUSE process is ready. The ``.fuse_ready`` file will + exist in the ``mount_point`` directory if True. Debugging purpose. + ops_class: FUSEr or Subclass of FUSEr + To override the default behavior of FUSEr. For Example, logging + to file. + + """ + func = lambda: FUSE( + ops_class(fs, path, ready_file=ready_file), + mount_point, + nothreads=not threads, + foreground=foreground, + ) + if not foreground: + th = threading.Thread(target=func) + th.daemon = True + th.start() + return th + else: # pragma: no cover + try: + func() + except KeyboardInterrupt: + pass + + +def main(args): + """Mount filesystem from chained URL to MOUNT_POINT. + + Examples: + + python3 -m fsspec.fuse memory /usr/share /tmp/mem + + python3 -m fsspec.fuse local /tmp/source /tmp/local \\ + -l /tmp/fsspecfuse.log + + You can also mount chained-URLs and use special settings: + + python3 -m fsspec.fuse 'filecache::zip::file://data.zip' \\ + / /tmp/zip \\ + -o 'filecache-cache_storage=/tmp/simplecache' + + You can specify the type of the setting by using `[int]` or `[bool]`, + (`true`, `yes`, `1` represents the Boolean value `True`): + + python3 -m fsspec.fuse 'simplecache::ftp://ftp1.at.proftpd.org' \\ + /historic/packages/RPMS /tmp/ftp \\ + -o 'simplecache-cache_storage=/tmp/simplecache' \\ + -o 'simplecache-check_files=false[bool]' \\ + -o 'ftp-listings_expiry_time=60[int]' \\ + -o 'ftp-username=anonymous' \\ + -o 'ftp-password=xieyanbo' + """ + + class RawDescriptionArgumentParser(argparse.ArgumentParser): + def format_help(self): + usage = super().format_help() + parts = usage.split("\n\n") + parts[1] = self.description.rstrip() + return "\n\n".join(parts) + + parser = RawDescriptionArgumentParser(prog="fsspec.fuse", description=main.__doc__) + parser.add_argument("--version", action="version", version=__version__) + parser.add_argument("url", type=str, help="fs url") + parser.add_argument("source_path", type=str, help="source directory in fs") + parser.add_argument("mount_point", type=str, help="local directory") + parser.add_argument( + "-o", + "--option", + action="append", + help="Any options of protocol included in the chained URL", + ) + parser.add_argument( + "-l", "--log-file", type=str, help="Logging FUSE debug info (Default: '')" + ) + parser.add_argument( + "-f", + "--foreground", + action="store_false", + help="Running in foreground or not (Default: False)", + ) + parser.add_argument( + "-t", + "--threads", + action="store_false", + help="Running with threads support (Default: False)", + ) + parser.add_argument( + "-r", + "--ready-file", + action="store_false", + help="The `.fuse_ready` file will exist after FUSE is ready. 
" + "(Debugging purpose, Default: False)", + ) + args = parser.parse_args(args) + + kwargs = {} + for item in args.option or []: + key, sep, value = item.partition("=") + if not sep: + parser.error(message=f"Wrong option: {item!r}") + val = value.lower() + if val.endswith("[int]"): + value = int(value[: -len("[int]")]) + elif val.endswith("[bool]"): + value = val[: -len("[bool]")] in ["1", "yes", "true"] + + if "-" in key: + fs_name, setting_name = key.split("-", 1) + if fs_name in kwargs: + kwargs[fs_name][setting_name] = value + else: + kwargs[fs_name] = {setting_name: value} + else: + kwargs[key] = value + + if args.log_file: + logging.basicConfig( + level=logging.DEBUG, + filename=args.log_file, + format="%(asctime)s %(message)s", + ) + + class LoggingFUSEr(FUSEr, LoggingMixIn): + pass + + fuser = LoggingFUSEr + else: + fuser = FUSEr + + fs, url_path = url_to_fs(args.url, **kwargs) + logger.debug("Mounting %s to %s", url_path, str(args.mount_point)) + run( + fs, + args.source_path, + args.mount_point, + foreground=args.foreground, + threads=args.threads, + ready_file=args.ready_file, + ops_class=fuser, + ) + + +if __name__ == "__main__": + import sys + + main(sys.argv[1:]) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/generic.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/generic.py new file mode 100644 index 0000000000000000000000000000000000000000..2156d354a87bcf044bef04a62b4d61f5864cf33c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/generic.py @@ -0,0 +1,394 @@ +from __future__ import annotations + +import inspect +import logging +import os +import shutil +import uuid + +from .asyn import AsyncFileSystem, _run_coros_in_chunks, sync_wrapper +from .callbacks import DEFAULT_CALLBACK +from .core import filesystem, get_filesystem_class, split_protocol, url_to_fs + +_generic_fs = {} +logger = logging.getLogger("fsspec.generic") + + +def set_generic_fs(protocol, **storage_options): + """Populate the dict used for method=="generic" lookups""" + _generic_fs[protocol] = filesystem(protocol, **storage_options) + + +def _resolve_fs(url, method, protocol=None, storage_options=None): + """Pick instance of backend FS""" + url = url[0] if isinstance(url, (list, tuple)) else url + protocol = protocol or split_protocol(url)[0] + storage_options = storage_options or {} + if method == "default": + return filesystem(protocol) + if method == "generic": + return _generic_fs[protocol] + if method == "current": + cls = get_filesystem_class(protocol) + return cls.current() + if method == "options": + fs, _ = url_to_fs(url, **storage_options.get(protocol, {})) + return fs + raise ValueError(f"Unknown FS resolution method: {method}") + + +def rsync( + source, + destination, + delete_missing=False, + source_field="size", + dest_field="size", + update_cond="different", + inst_kwargs=None, + fs=None, + **kwargs, +): + """Sync files between two directory trees + + (experimental) + + Parameters + ---------- + source: str + Root of the directory tree to take files from. This must be a directory, but + do not include any terminating "/" character + destination: str + Root path to copy into. The contents of this location should be + identical to the contents of ``source`` when done. This will be made a + directory, and the terminal "/" should not be included. 
+ delete_missing: bool + If there are paths in the destination that don't exist in the + source and this is True, delete them. Otherwise, leave them alone. + source_field: str | callable + If ``update_field`` is "different", this is the key in the info + of source files to consider for difference. Maybe a function of the + info dict. + dest_field: str | callable + If ``update_field`` is "different", this is the key in the info + of destination files to consider for difference. May be a function of + the info dict. + update_cond: "different"|"always"|"never" + If "always", every file is copied, regardless of whether it exists in + the destination. If "never", files that exist in the destination are + not copied again. If "different" (default), only copy if the info + fields given by ``source_field`` and ``dest_field`` (usually "size") + are different. Other comparisons may be added in the future. + inst_kwargs: dict|None + If ``fs`` is None, use this set of keyword arguments to make a + GenericFileSystem instance + fs: GenericFileSystem|None + Instance to use if explicitly given. The instance defines how to + to make downstream file system instances from paths. + + Returns + ------- + dict of the copy operations that were performed, {source: destination} + """ + fs = fs or GenericFileSystem(**(inst_kwargs or {})) + source = fs._strip_protocol(source) + destination = fs._strip_protocol(destination) + allfiles = fs.find(source, withdirs=True, detail=True) + if not fs.isdir(source): + raise ValueError("Can only rsync on a directory") + otherfiles = fs.find(destination, withdirs=True, detail=True) + dirs = [ + a + for a, v in allfiles.items() + if v["type"] == "directory" and a.replace(source, destination) not in otherfiles + ] + logger.debug(f"{len(dirs)} directories to create") + if dirs: + fs.make_many_dirs( + [dirn.replace(source, destination) for dirn in dirs], exist_ok=True + ) + allfiles = {a: v for a, v in allfiles.items() if v["type"] == "file"} + logger.debug(f"{len(allfiles)} files to consider for copy") + to_delete = [ + o + for o, v in otherfiles.items() + if o.replace(destination, source) not in allfiles and v["type"] == "file" + ] + for k, v in allfiles.copy().items(): + otherfile = k.replace(source, destination) + if otherfile in otherfiles: + if update_cond == "always": + allfiles[k] = otherfile + elif update_cond == "different": + inf1 = source_field(v) if callable(source_field) else v[source_field] + v2 = otherfiles[otherfile] + inf2 = dest_field(v2) if callable(dest_field) else v2[dest_field] + if inf1 != inf2: + # details mismatch, make copy + allfiles[k] = otherfile + else: + # details match, don't copy + allfiles.pop(k) + else: + # file not in target yet + allfiles[k] = otherfile + logger.debug(f"{len(allfiles)} files to copy") + if allfiles: + source_files, target_files = zip(*allfiles.items()) + fs.cp(source_files, target_files, **kwargs) + logger.debug(f"{len(to_delete)} files to delete") + if delete_missing and to_delete: + fs.rm(to_delete) + return allfiles + + +class GenericFileSystem(AsyncFileSystem): + """Wrapper over all other FS types + + + + This implementation is a single unified interface to be able to run FS operations + over generic URLs, and dispatch to the specific implementations using the URL + protocol prefix. + + Note: instances of this FS are always async, even if you never use it with any async + backend. 
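+
+    A minimal usage sketch (illustrative; with the default resolution
+    method, each protocol gets its default filesystem instance):
+
+    >>> fs = GenericFileSystem()  # doctest: +SKIP
+    >>> fs.copy("memory://a", "file:///tmp/a")  # doctest: +SKIP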
+ """ + + protocol = "generic" # there is no real reason to ever use a protocol with this FS + + def __init__(self, default_method="default", storage_options=None, **kwargs): + """ + + Parameters + ---------- + default_method: str (optional) + Defines how to configure backend FS instances. Options are: + - "default": instantiate like FSClass(), with no + extra arguments; this is the default instance of that FS, and can be + configured via the config system + - "generic": takes instances from the `_generic_fs` dict in this module, + which you must populate before use. Keys are by protocol + - "options": expects storage_options, a dict mapping protocol to + kwargs to use when constructing the filesystem + - "current": takes the most recently instantiated version of each FS + """ + self.method = default_method + self.st_opts = storage_options + super().__init__(**kwargs) + + def _parent(self, path): + fs = _resolve_fs(path, self.method, storage_options=self.st_opts) + return fs.unstrip_protocol(fs._parent(path)) + + def _strip_protocol(self, path): + # normalization only + fs = _resolve_fs(path, self.method, storage_options=self.st_opts) + return fs.unstrip_protocol(fs._strip_protocol(path)) + + async def _find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs): + fs = _resolve_fs(path, self.method, storage_options=self.st_opts) + if fs.async_impl: + out = await fs._find( + path, maxdepth=maxdepth, withdirs=withdirs, detail=True, **kwargs + ) + else: + out = fs.find( + path, maxdepth=maxdepth, withdirs=withdirs, detail=True, **kwargs + ) + result = {} + for k, v in out.items(): + v = v.copy() # don't corrupt target FS dircache + name = fs.unstrip_protocol(k) + v["name"] = name + result[name] = v + if detail: + return result + return list(result) + + async def _info(self, url, **kwargs): + fs = _resolve_fs(url, self.method) + if fs.async_impl: + out = await fs._info(url, **kwargs) + else: + out = fs.info(url, **kwargs) + out = out.copy() # don't edit originals + out["name"] = fs.unstrip_protocol(out["name"]) + return out + + async def _ls( + self, + url, + detail=True, + **kwargs, + ): + fs = _resolve_fs(url, self.method) + if fs.async_impl: + out = await fs._ls(url, detail=True, **kwargs) + else: + out = fs.ls(url, detail=True, **kwargs) + out = [o.copy() for o in out] # don't edit originals + for o in out: + o["name"] = fs.unstrip_protocol(o["name"]) + if detail: + return out + else: + return [o["name"] for o in out] + + async def _cat_file( + self, + url, + **kwargs, + ): + fs = _resolve_fs(url, self.method) + if fs.async_impl: + return await fs._cat_file(url, **kwargs) + else: + return fs.cat_file(url, **kwargs) + + async def _pipe_file( + self, + path, + value, + **kwargs, + ): + fs = _resolve_fs(path, self.method, storage_options=self.st_opts) + if fs.async_impl: + return await fs._pipe_file(path, value, **kwargs) + else: + return fs.pipe_file(path, value, **kwargs) + + async def _rm(self, url, **kwargs): + urls = url + if isinstance(urls, str): + urls = [urls] + fs = _resolve_fs(urls[0], self.method) + if fs.async_impl: + await fs._rm(urls, **kwargs) + else: + fs.rm(url, **kwargs) + + async def _makedirs(self, path, exist_ok=False): + logger.debug("Make dir %s", path) + fs = _resolve_fs(path, self.method, storage_options=self.st_opts) + if fs.async_impl: + await fs._makedirs(path, exist_ok=exist_ok) + else: + fs.makedirs(path, exist_ok=exist_ok) + + def rsync(self, source, destination, **kwargs): + """Sync files between two directory trees + + See `func:rsync` for more 
details. + """ + rsync(source, destination, fs=self, **kwargs) + + async def _cp_file( + self, + url, + url2, + blocksize=2**20, + callback=DEFAULT_CALLBACK, + tempdir: str | None = None, + **kwargs, + ): + fs = _resolve_fs(url, self.method) + fs2 = _resolve_fs(url2, self.method) + if fs is fs2: + # pure remote + if fs.async_impl: + return await fs._copy(url, url2, **kwargs) + else: + return fs.copy(url, url2, **kwargs) + await copy_file_op(fs, [url], fs2, [url2], tempdir, 1, on_error="raise") + + async def _make_many_dirs(self, urls, exist_ok=True): + fs = _resolve_fs(urls[0], self.method) + if fs.async_impl: + coros = [fs._makedirs(u, exist_ok=exist_ok) for u in urls] + await _run_coros_in_chunks(coros) + else: + for u in urls: + fs.makedirs(u, exist_ok=exist_ok) + + make_many_dirs = sync_wrapper(_make_many_dirs) + + async def _copy( + self, + path1: list[str], + path2: list[str], + recursive: bool = False, + on_error: str = "ignore", + maxdepth: int | None = None, + batch_size: int | None = None, + tempdir: str | None = None, + **kwargs, + ): + # TODO: special case for one FS being local, which can use get/put + # TODO: special case for one being memFS, which can use cat/pipe + if recursive: + raise NotImplementedError("Please use fsspec.generic.rsync") + path1 = [path1] if isinstance(path1, str) else path1 + path2 = [path2] if isinstance(path2, str) else path2 + + fs = _resolve_fs(path1, self.method) + fs2 = _resolve_fs(path2, self.method) + + if fs is fs2: + if fs.async_impl: + return await fs._copy(path1, path2, **kwargs) + else: + return fs.copy(path1, path2, **kwargs) + + await copy_file_op( + fs, path1, fs2, path2, tempdir, batch_size, on_error=on_error + ) + + +async def copy_file_op( + fs1, url1, fs2, url2, tempdir=None, batch_size=20, on_error="ignore" +): + import tempfile + + tempdir = tempdir or tempfile.mkdtemp() + try: + coros = [ + _copy_file_op( + fs1, + u1, + fs2, + u2, + os.path.join(tempdir, uuid.uuid4().hex), + ) + for u1, u2 in zip(url1, url2) + ] + out = await _run_coros_in_chunks( + coros, batch_size=batch_size, return_exceptions=True + ) + finally: + shutil.rmtree(tempdir) + if on_error == "return": + return out + elif on_error == "raise": + for o in out: + if isinstance(o, Exception): + raise o + + +async def _copy_file_op(fs1, url1, fs2, url2, local, on_error="ignore"): + if fs1.async_impl: + await fs1._get_file(url1, local) + else: + fs1.get_file(url1, local) + if fs2.async_impl: + await fs2._put_file(local, url2) + else: + fs2.put_file(local, url2) + os.unlink(local) + logger.debug("Copy %s -> %s; done", url1, url2) + + +async def maybe_await(cor): + if inspect.iscoroutine(cor): + return await cor + else: + return cor diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/gui.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/gui.py new file mode 100644 index 0000000000000000000000000000000000000000..9d914c8beb6cabb2c2700eb8eee31028559be2bd --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/gui.py @@ -0,0 +1,417 @@ +import ast +import contextlib +import logging +import os +import re +from collections.abc import Sequence +from typing import ClassVar + +import panel as pn + +from .core import OpenFile, get_filesystem_class, split_protocol +from .registry import known_implementations + +pn.extension() +logger = logging.getLogger("fsspec.gui") + + +class SigSlot: + """Signal-slot mixin, for Panel event passing + + Include this 
class in a widget manager's superclasses to be able to + register events and callbacks on Panel widgets managed by that class. + + The method ``_register`` should be called as widgets are added, and external + code should call ``connect`` to associate callbacks. + + By default, all signals emit a DEBUG logging statement. + """ + + # names of signals that this class may emit each of which must be + # set by _register for any new instance + signals: ClassVar[Sequence[str]] = [] + # names of actions that this class may respond to + slots: ClassVar[Sequence[str]] = [] + + # each of which must be a method name + + def __init__(self): + self._ignoring_events = False + self._sigs = {} + self._map = {} + self._setup() + + def _setup(self): + """Create GUI elements and register signals""" + self.panel = pn.pane.PaneBase() + # no signals to set up in the base class + + def _register( + self, widget, name, thing="value", log_level=logging.DEBUG, auto=False + ): + """Watch the given attribute of a widget and assign it a named event + + This is normally called at the time a widget is instantiated, in the + class which owns it. + + Parameters + ---------- + widget : pn.layout.Panel or None + Widget to watch. If None, an anonymous signal not associated with + any widget. + name : str + Name of this event + thing : str + Attribute of the given widget to watch + log_level : int + When the signal is triggered, a logging event of the given level + will be fired in the dfviz logger. + auto : bool + If True, automatically connects with a method in this class of the + same name. + """ + if name not in self.signals: + raise ValueError(f"Attempt to assign an undeclared signal: {name}") + self._sigs[name] = { + "widget": widget, + "callbacks": [], + "thing": thing, + "log": log_level, + } + wn = "-".join( + [ + getattr(widget, "name", str(widget)) if widget is not None else "none", + thing, + ] + ) + self._map[wn] = name + if widget is not None: + widget.param.watch(self._signal, thing, onlychanged=True) + if auto and hasattr(self, name): + self.connect(name, getattr(self, name)) + + def _repr_mimebundle_(self, *args, **kwargs): + """Display in a notebook or a server""" + try: + return self.panel._repr_mimebundle_(*args, **kwargs) + except (ValueError, AttributeError) as exc: + raise NotImplementedError( + "Panel does not seem to be set up properly" + ) from exc + + def connect(self, signal, slot): + """Associate call back with given event + + The callback must be a function which takes the "new" value of the + watched attribute as the only parameter. If the callback return False, + this cancels any further processing of the given event. + + Alternatively, the callback can be a string, in which case it means + emitting the correspondingly-named event (i.e., connect to self) + """ + self._sigs[signal]["callbacks"].append(slot) + + def _signal(self, event): + """This is called by a an action on a widget + + Within an self.ignore_events context, nothing happens. + + Tests can execute this method by directly changing the values of + widget components. 
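+
+        For example (illustrative), assigning to a registered widget's
+        watched attribute (e.g. ``instance.panel.value = [...]``) routes
+        through here and fires the mapped signal's callbacks.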
+ """ + if not self._ignoring_events: + wn = "-".join([event.obj.name, event.name]) + if wn in self._map and self._map[wn] in self._sigs: + self._emit(self._map[wn], event.new) + + @contextlib.contextmanager + def ignore_events(self): + """Temporarily turn off events processing in this instance + + (does not propagate to children) + """ + self._ignoring_events = True + try: + yield + finally: + self._ignoring_events = False + + def _emit(self, sig, value=None): + """An event happened, call its callbacks + + This method can be used in tests to simulate message passing without + directly changing visual elements. + + Calling of callbacks will halt whenever one returns False. + """ + logger.log(self._sigs[sig]["log"], f"{sig}: {value}") + for callback in self._sigs[sig]["callbacks"]: + if isinstance(callback, str): + self._emit(callback) + else: + try: + # running callbacks should not break the interface + ret = callback(value) + if ret is False: + break + except Exception as e: + logger.exception( + "Exception (%s) while executing callback for signal: %s", + e, + sig, + ) + + def show(self, threads=False): + """Open a new browser tab and display this instance's interface""" + self.panel.show(threads=threads, verbose=False) + return self + + +class SingleSelect(SigSlot): + """A multiselect which only allows you to select one item for an event""" + + signals = ["_selected", "selected"] # the first is internal + slots = ["set_options", "set_selection", "add", "clear", "select"] + + def __init__(self, **kwargs): + self.kwargs = kwargs + super().__init__() + + def _setup(self): + self.panel = pn.widgets.MultiSelect(**self.kwargs) + self._register(self.panel, "_selected", "value") + self._register(None, "selected") + self.connect("_selected", self.select_one) + + def _signal(self, *args, **kwargs): + super()._signal(*args, **kwargs) + + def select_one(self, *_): + with self.ignore_events(): + val = [self.panel.value[-1]] if self.panel.value else [] + self.panel.value = val + self._emit("selected", self.panel.value) + + def set_options(self, options): + self.panel.options = options + + def clear(self): + self.panel.options = [] + + @property + def value(self): + return self.panel.value + + def set_selection(self, selection): + self.panel.value = [selection] + + +class FileSelector(SigSlot): + """Panel-based graphical file selector widget + + Instances of this widget are interactive and can be displayed in jupyter by having + them as the output of a cell, or in a separate browser tab using ``.show()``. + """ + + signals = [ + "protocol_changed", + "selection_changed", + "directory_entered", + "home_clicked", + "up_clicked", + "go_clicked", + "filters_changed", + ] + slots = ["set_filters", "go_home"] + + def __init__(self, url=None, filters=None, ignore=None, kwargs=None): + """ + + Parameters + ---------- + url : str (optional) + Initial value of the URL to populate the dialog; should include protocol + filters : list(str) (optional) + File endings to include in the listings. If not included, all files are + allowed. Does not affect directories. + If given, the endings will appear as checkboxes in the interface + ignore : list(str) (optional) + Regex(s) of file basename patterns to ignore, e.g., "\\." 
for typical + hidden files on posix + kwargs : dict (optional) + To pass to file system instance + """ + if url: + self.init_protocol, url = split_protocol(url) + else: + self.init_protocol, url = "file", os.getcwd() + self.init_url = url + self.init_kwargs = (kwargs if isinstance(kwargs, str) else str(kwargs)) or "{}" + self.filters = filters + self.ignore = [re.compile(i) for i in ignore or []] + self._fs = None + super().__init__() + + def _setup(self): + self.url = pn.widgets.TextInput( + name="url", + value=self.init_url, + align="end", + sizing_mode="stretch_width", + width_policy="max", + ) + self.protocol = pn.widgets.Select( + options=sorted(known_implementations), + value=self.init_protocol, + name="protocol", + align="center", + ) + self.kwargs = pn.widgets.TextInput( + name="kwargs", value=self.init_kwargs, align="center" + ) + self.go = pn.widgets.Button(name="⇨", align="end", width=45) + self.main = SingleSelect(size=10) + self.home = pn.widgets.Button(name="🏠", width=40, height=30, align="end") + self.up = pn.widgets.Button(name="‹", width=30, height=30, align="end") + + self._register(self.protocol, "protocol_changed", auto=True) + self._register(self.go, "go_clicked", "clicks", auto=True) + self._register(self.up, "up_clicked", "clicks", auto=True) + self._register(self.home, "home_clicked", "clicks", auto=True) + self._register(None, "selection_changed") + self.main.connect("selected", self.selection_changed) + self._register(None, "directory_entered") + self.prev_protocol = self.protocol.value + self.prev_kwargs = self.storage_options + + self.filter_sel = pn.widgets.CheckBoxGroup( + value=[], options=[], inline=False, align="end", width_policy="min" + ) + self._register(self.filter_sel, "filters_changed", auto=True) + + self.panel = pn.Column( + pn.Row(self.protocol, self.kwargs), + pn.Row(self.home, self.up, self.url, self.go, self.filter_sel), + self.main.panel, + ) + self.set_filters(self.filters) + self.go_clicked() + + def set_filters(self, filters=None): + self.filters = filters + if filters: + self.filter_sel.options = filters + self.filter_sel.value = filters + else: + self.filter_sel.options = [] + self.filter_sel.value = [] + + @property + def storage_options(self): + """Value of the kwargs box as a dictionary""" + return ast.literal_eval(self.kwargs.value) or {} + + @property + def fs(self): + """Current filesystem instance""" + if self._fs is None: + cls = get_filesystem_class(self.protocol.value) + self._fs = cls(**self.storage_options) + return self._fs + + @property + def urlpath(self): + """URL of currently selected item""" + return ( + (f"{self.protocol.value}://{self.main.value[0]}") + if self.main.value + else None + ) + + def open_file(self, mode="rb", compression=None, encoding=None): + """Create OpenFile instance for the currently selected item + + For example, in a notebook you might do something like + + .. code-block:: + + [ ]: sel = FileSelector(); sel + + # user selects their file + + [ ]: with sel.open_file('rb') as f: + ... out = f.read() + + Parameters + ---------- + mode: str (optional) + Open mode for the file. + compression: str (optional) + The interact with the file as compressed. Set to 'infer' to guess + compression from the file ending + encoding: str (optional) + If using text mode, use this encoding; defaults to UTF8. 
+ """ + if self.urlpath is None: + raise ValueError("No file selected") + return OpenFile(self.fs, self.urlpath, mode, compression, encoding) + + def filters_changed(self, values): + self.filters = values + self.go_clicked() + + def selection_changed(self, *_): + if self.urlpath is None: + return + if self.fs.isdir(self.urlpath): + self.url.value = self.fs._strip_protocol(self.urlpath) + self.go_clicked() + + def go_clicked(self, *_): + if ( + self.prev_protocol != self.protocol.value + or self.prev_kwargs != self.storage_options + ): + self._fs = None # causes fs to be recreated + self.prev_protocol = self.protocol.value + self.prev_kwargs = self.storage_options + listing = sorted( + self.fs.ls(self.url.value, detail=True), key=lambda x: x["name"] + ) + listing = [ + l + for l in listing + if not any(i.match(l["name"].rsplit("/", 1)[-1]) for i in self.ignore) + ] + folders = { + "📁 " + o["name"].rsplit("/", 1)[-1]: o["name"] + for o in listing + if o["type"] == "directory" + } + files = { + "📄 " + o["name"].rsplit("/", 1)[-1]: o["name"] + for o in listing + if o["type"] == "file" + } + if self.filters: + files = { + k: v + for k, v in files.items() + if any(v.endswith(ext) for ext in self.filters) + } + self.main.set_options(dict(**folders, **files)) + + def protocol_changed(self, *_): + self._fs = None + self.main.options = [] + self.url.value = "" + + def home_clicked(self, *_): + self.protocol.value = self.init_protocol + self.kwargs.value = self.init_kwargs + self.url.value = self.init_url + self.go_clicked() + + def up_clicked(self, *_): + self.url.value = self.fs._parent(self.url.value) + self.go_clicked() diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/arrow.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/arrow.py new file mode 100644 index 0000000000000000000000000000000000000000..530df901a7225bab4afb9f08d06540bc18e91aef --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/arrow.py @@ -0,0 +1,304 @@ +import errno +import io +import os +import secrets +import shutil +from contextlib import suppress +from functools import cached_property, wraps +from urllib.parse import parse_qs + +from fsspec.spec import AbstractFileSystem +from fsspec.utils import ( + get_package_version_without_import, + infer_storage_options, + mirror_from, + tokenize, +) + + +def wrap_exceptions(func): + @wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except OSError as exception: + if not exception.args: + raise + + message, *args = exception.args + if isinstance(message, str) and "does not exist" in message: + raise FileNotFoundError(errno.ENOENT, message) from exception + else: + raise + + return wrapper + + +PYARROW_VERSION = None + + +class ArrowFSWrapper(AbstractFileSystem): + """FSSpec-compatible wrapper of pyarrow.fs.FileSystem. 
+ + Parameters + ---------- + fs : pyarrow.fs.FileSystem + + """ + + root_marker = "/" + + def __init__(self, fs, **kwargs): + global PYARROW_VERSION + PYARROW_VERSION = get_package_version_without_import("pyarrow") + self.fs = fs + super().__init__(**kwargs) + + @property + def protocol(self): + return self.fs.type_name + + @cached_property + def fsid(self): + return "hdfs_" + tokenize(self.fs.host, self.fs.port) + + @classmethod + def _strip_protocol(cls, path): + ops = infer_storage_options(path) + path = ops["path"] + if path.startswith("//"): + # special case for "hdfs://path" (without the triple slash) + path = path[1:] + return path + + def ls(self, path, detail=False, **kwargs): + path = self._strip_protocol(path) + from pyarrow.fs import FileSelector + + entries = [ + self._make_entry(entry) + for entry in self.fs.get_file_info(FileSelector(path)) + ] + if detail: + return entries + else: + return [entry["name"] for entry in entries] + + def info(self, path, **kwargs): + path = self._strip_protocol(path) + [info] = self.fs.get_file_info([path]) + return self._make_entry(info) + + def exists(self, path): + path = self._strip_protocol(path) + try: + self.info(path) + except FileNotFoundError: + return False + else: + return True + + def _make_entry(self, info): + from pyarrow.fs import FileType + + if info.type is FileType.Directory: + kind = "directory" + elif info.type is FileType.File: + kind = "file" + elif info.type is FileType.NotFound: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), info.path) + else: + kind = "other" + + return { + "name": info.path, + "size": info.size, + "type": kind, + "mtime": info.mtime, + } + + @wrap_exceptions + def cp_file(self, path1, path2, **kwargs): + path1 = self._strip_protocol(path1).rstrip("/") + path2 = self._strip_protocol(path2).rstrip("/") + + with self._open(path1, "rb") as lstream: + tmp_fname = f"{path2}.tmp.{secrets.token_hex(6)}" + try: + with self.open(tmp_fname, "wb") as rstream: + shutil.copyfileobj(lstream, rstream) + self.fs.move(tmp_fname, path2) + except BaseException: + with suppress(FileNotFoundError): + self.fs.delete_file(tmp_fname) + raise + + @wrap_exceptions + def mv(self, path1, path2, **kwargs): + path1 = self._strip_protocol(path1).rstrip("/") + path2 = self._strip_protocol(path2).rstrip("/") + self.fs.move(path1, path2) + + @wrap_exceptions + def rm_file(self, path): + path = self._strip_protocol(path) + self.fs.delete_file(path) + + @wrap_exceptions + def rm(self, path, recursive=False, maxdepth=None): + path = self._strip_protocol(path).rstrip("/") + if self.isdir(path): + if recursive: + self.fs.delete_dir(path) + else: + raise ValueError("Can't delete directories without recursive=True") + else: + self.fs.delete_file(path) + + @wrap_exceptions + def _open(self, path, mode="rb", block_size=None, seekable=True, **kwargs): + if mode == "rb": + if seekable: + method = self.fs.open_input_file + else: + method = self.fs.open_input_stream + elif mode == "wb": + method = self.fs.open_output_stream + elif mode == "ab": + method = self.fs.open_append_stream + else: + raise ValueError(f"unsupported mode for Arrow filesystem: {mode!r}") + + _kwargs = {} + if mode != "rb" or not seekable: + if int(PYARROW_VERSION.split(".")[0]) >= 4: + # disable compression auto-detection + _kwargs["compression"] = None + stream = method(path, **_kwargs) + + return ArrowFile(self, stream, path, mode, block_size, **kwargs) + + @wrap_exceptions + def mkdir(self, path, create_parents=True, **kwargs): + path =
self._strip_protocol(path) + if create_parents: + self.makedirs(path, exist_ok=True) + else: + self.fs.create_dir(path, recursive=False) + + @wrap_exceptions + def makedirs(self, path, exist_ok=False): + path = self._strip_protocol(path) + self.fs.create_dir(path, recursive=True) + + @wrap_exceptions + def rmdir(self, path): + path = self._strip_protocol(path) + self.fs.delete_dir(path) + + @wrap_exceptions + def modified(self, path): + path = self._strip_protocol(path) + return self.fs.get_file_info(path).mtime + + def cat_file(self, path, start=None, end=None, **kwargs): + kwargs["seekable"] = start not in [None, 0] + return super().cat_file(path, start=start, end=end, **kwargs) + + def get_file(self, rpath, lpath, **kwargs): + kwargs["seekable"] = False + super().get_file(rpath, lpath, **kwargs) + + +@mirror_from( + "stream", + [ + "read", + "seek", + "tell", + "write", + "readable", + "writable", + "close", + "size", + "seekable", + ], +) +class ArrowFile(io.IOBase): + def __init__(self, fs, stream, path, mode, block_size=None, **kwargs): + self.path = path + self.mode = mode + + self.fs = fs + self.stream = stream + + self.blocksize = self.block_size = block_size + self.kwargs = kwargs + + def __enter__(self): + return self + + def __exit__(self, *args): + return self.close() + + +class HadoopFileSystem(ArrowFSWrapper): + """A wrapper on top of the pyarrow.fs.HadoopFileSystem + to connect its interface with fsspec""" + + protocol = "hdfs" + + def __init__( + self, + host="default", + port=0, + user=None, + kerb_ticket=None, + replication=3, + extra_conf=None, + **kwargs, + ): + """ + + Parameters + ---------- + host: str + Hostname, IP or "default" to try to read from Hadoop config + port: int + Port to connect on, or default from Hadoop config if 0 + user: str or None + If given, connect as this username + kerb_ticket: str or None + If given, use this ticket for authentication + replication: int + Set replication factor of file for write operations. Default value is 3. + extra_conf: None or dict + Passed on to HadoopFileSystem + """ + from pyarrow.fs import HadoopFileSystem + + fs = HadoopFileSystem( + host=host, + port=port, + user=user, + kerb_ticket=kerb_ticket, + replication=replication, + extra_conf=extra_conf, + ) + super().__init__(fs=fs, **kwargs) + + @staticmethod + def _get_kwargs_from_urls(path): + ops = infer_storage_options(path) + out = {} + if ops.get("host", None): + out["host"] = ops["host"] + if ops.get("username", None): + out["user"] = ops["username"] + if ops.get("port", None): + out["port"] = ops["port"] + if ops.get("url_query", None): + queries = parse_qs(ops["url_query"]) + if queries.get("replication", None): + out["replication"] = int(queries["replication"][0]) + return out diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/asyn_wrapper.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/asyn_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..9d106a74c9453be6a75366dc93d46643daf19b5c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/asyn_wrapper.py @@ -0,0 +1,114 @@ +import asyncio +import functools +import inspect + +import fsspec +from fsspec.asyn import AsyncFileSystem, running_async + + +def async_wrapper(func, obj=None): + """ + Wraps a synchronous function to make it awaitable.
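+ + The wrapped callable is executed in a worker thread via ``asyncio.to_thread``, so the running event loop is not blocked. For example (a sketch, where ``fs`` is any synchronous filesystem instance): + + .. code-block:: + + async_ls = async_wrapper(fs.ls) + listing = await async_ls("/data")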
+ + Parameters + ---------- + func : callable + The synchronous function to wrap. + obj : object, optional + The instance to bind the function to, if applicable. + + Returns + ------- + coroutine + An awaitable version of the function. + """ + + @functools.wraps(func) + async def wrapper(*args, **kwargs): + return await asyncio.to_thread(func, *args, **kwargs) + + return wrapper + + +class AsyncFileSystemWrapper(AsyncFileSystem): + """ + A wrapper class to convert a synchronous filesystem into an asynchronous one. + + This class takes an existing synchronous filesystem implementation and wraps all + its methods to provide an asynchronous interface. + + Parameters + ---------- + sync_fs : AbstractFileSystem + The synchronous filesystem instance to wrap. + """ + + protocol = "asyncwrapper", "async_wrapper" + cachable = False + + def __init__( + self, + fs=None, + asynchronous=None, + target_protocol=None, + target_options=None, + **kwargs, + ): + if asynchronous is None: + asynchronous = running_async() + super().__init__(asynchronous=asynchronous, **kwargs) + if fs is not None: + self.sync_fs = fs + else: + self.sync_fs = fsspec.filesystem(target_protocol, **target_options) + self.protocol = self.sync_fs.protocol + self._wrap_all_sync_methods() + + @property + def fsid(self): + return f"async_{self.sync_fs.fsid}" + + def _wrap_all_sync_methods(self): + """ + Wrap all synchronous methods of the underlying filesystem with asynchronous versions. + """ + excluded_methods = {"open"} + for method_name in dir(self.sync_fs): + if method_name.startswith("_") or method_name in excluded_methods: + continue + + attr = inspect.getattr_static(self.sync_fs, method_name) + if isinstance(attr, property): + continue + + method = getattr(self.sync_fs, method_name) + if callable(method) and not inspect.iscoroutinefunction(method): + async_method = async_wrapper(method, obj=self) + setattr(self, f"_{method_name}", async_method) + + @classmethod + def wrap_class(cls, sync_fs_class): + """ + Create a new class that can be used to instantiate an AsyncFileSystemWrapper + with lazy instantiation of the underlying synchronous filesystem. + + Parameters + ---------- + sync_fs_class : type + The class of the synchronous filesystem to wrap. + + Returns + ------- + type + A new class that wraps the provided synchronous filesystem class. + """ + + class GeneratedAsyncFileSystemWrapper(cls): + def __init__(self, *args, **kwargs): + sync_fs = sync_fs_class(*args, **kwargs) + super().__init__(sync_fs) + + GeneratedAsyncFileSystemWrapper.__name__ = ( + f"Async{sync_fs_class.__name__}Wrapper" + ) + return GeneratedAsyncFileSystemWrapper diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cache_mapper.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cache_mapper.py new file mode 100644 index 0000000000000000000000000000000000000000..6e7c7d88afdddf12f77b26bb635bd8bf1e2bd7f1 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cache_mapper.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +import abc +import hashlib + +from fsspec.implementations.local import make_path_posix + + +class AbstractCacheMapper(abc.ABC): + """Abstract super-class for mappers from remote URLs to local cached + basenames. + """ + + @abc.abstractmethod + def __call__(self, path: str) -> str: ... 
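+ + # A concrete mapper only has to implement __call__. As a sketch (not part of + # fsspec), a mapper that keeps the basename and appends a short hash of the + # full path could look like: + # + # class HashedBasenameCacheMapper(AbstractCacheMapper): + # def __call__(self, path: str) -> str: + # digest = hashlib.sha256(path.encode()).hexdigest()[:8] + # return f"{path.rsplit('/', 1)[-1]}_{digest}"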
+ + def __eq__(self, other: object) -> bool: + # Identity only depends on class. When derived classes have attributes + # they will need to be included. + return isinstance(other, type(self)) + + def __hash__(self) -> int: + # Identity only depends on class. When derived classes have attributes + # they will need to be included. + return hash(type(self)) + + +class BasenameCacheMapper(AbstractCacheMapper): + """Cache mapper that uses the basename of the remote URL and a fixed number + of directory levels above this. + + The default is zero directory levels, meaning different paths with the same + basename will have the same cached basename. + """ + + def __init__(self, directory_levels: int = 0): + if directory_levels < 0: + raise ValueError( + "BasenameCacheMapper requires zero or positive directory_levels" + ) + self.directory_levels = directory_levels + + # Separator for directories when encoded as strings. + self._separator = "_@_" + + def __call__(self, path: str) -> str: + path = make_path_posix(path) + prefix, *bits = path.rsplit("/", self.directory_levels + 1) + if bits: + return self._separator.join(bits) + else: + return prefix # No separator found, simple filename + + def __eq__(self, other: object) -> bool: + return super().__eq__(other) and self.directory_levels == other.directory_levels + + def __hash__(self) -> int: + return super().__hash__() ^ hash(self.directory_levels) + + +class HashCacheMapper(AbstractCacheMapper): + """Cache mapper that uses a hash of the remote URL.""" + + def __call__(self, path: str) -> str: + return hashlib.sha256(path.encode()).hexdigest() + + +def create_cache_mapper(same_names: bool) -> AbstractCacheMapper: + """Factory method to create cache mapper for backward compatibility with + ``CachingFileSystem`` constructor using ``same_names`` kwarg. + """ + if same_names: + return BasenameCacheMapper() + else: + return HashCacheMapper() diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cache_metadata.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cache_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..4a519158d4aca975dcbcc5978aeb57dab0e53cb0 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cache_metadata.py @@ -0,0 +1,233 @@ +from __future__ import annotations + +import os +import pickle +import time +from typing import TYPE_CHECKING + +from fsspec.utils import atomic_write + +try: + import ujson as json +except ImportError: + if not TYPE_CHECKING: + import json + +if TYPE_CHECKING: + from collections.abc import Iterator + from typing import Any, Literal + + from typing_extensions import TypeAlias + + from .cached import CachingFileSystem + + Detail: TypeAlias = dict[str, Any] + + +class CacheMetadata: + """Cache metadata. + + All reading and writing of cache metadata is performed by this class, + accessing the cached files and blocks is not. + + Metadata is stored in a single file per storage directory in JSON format. + For backward compatibility, also reads metadata stored in pickle format + which is converted to JSON when next saved. + """ + + def __init__(self, storage: list[str]): + """ + + Parameters + ---------- + storage: list[str] + Directories containing cached files, must be at least one. Metadata + is stored in the last of these directories by convention. 
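+ + For example (a sketch with one read-only and one writable location): + + .. code-block:: + + meta = CacheMetadata(["/ro/cache", "/rw/cache"]) + meta.load() # reads both locations; save() writes only to /rw/cache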
+ """ + if not storage: + raise ValueError("CacheMetadata expects at least one storage location") + + self._storage = storage + self.cached_files: list[Detail] = [{}] + + # Private attribute to force saving of metadata in pickle format rather than + # JSON for use in tests to confirm can read both pickle and JSON formats. + self._force_save_pickle = False + + def _load(self, fn: str) -> Detail: + """Low-level function to load metadata from specific file""" + try: + with open(fn, "r") as f: + loaded = json.load(f) + except ValueError: + with open(fn, "rb") as f: + loaded = pickle.load(f) + for c in loaded.values(): + if isinstance(c.get("blocks"), list): + c["blocks"] = set(c["blocks"]) + return loaded + + def _save(self, metadata_to_save: Detail, fn: str) -> None: + """Low-level function to save metadata to specific file""" + if self._force_save_pickle: + with atomic_write(fn) as f: + pickle.dump(metadata_to_save, f) + else: + with atomic_write(fn, mode="w") as f: + json.dump(metadata_to_save, f) + + def _scan_locations( + self, writable_only: bool = False + ) -> Iterator[tuple[str, str, bool]]: + """Yield locations (filenames) where metadata is stored, and whether + writable or not. + + Parameters + ---------- + writable: bool + Set to True to only yield writable locations. + + Returns + ------- + Yields (str, str, bool) + """ + n = len(self._storage) + for i, storage in enumerate(self._storage): + writable = i == n - 1 + if writable_only and not writable: + continue + yield os.path.join(storage, "cache"), storage, writable + + def check_file( + self, path: str, cfs: CachingFileSystem | None + ) -> Literal[False] | tuple[Detail, str]: + """If path is in cache return its details, otherwise return ``False``. + + If the optional CachingFileSystem is specified then it is used to + perform extra checks to reject possible matches, such as if they are + too old. + """ + for (fn, base, _), cache in zip(self._scan_locations(), self.cached_files): + if path not in cache: + continue + detail = cache[path].copy() + + if cfs is not None: + if cfs.check_files and detail["uid"] != cfs.fs.ukey(path): + # Wrong file as determined by hash of file properties + continue + if cfs.expiry and time.time() - detail["time"] > cfs.expiry: + # Cached file has expired + continue + + fn = os.path.join(base, detail["fn"]) + if os.path.exists(fn): + return detail, fn + return False + + def clear_expired(self, expiry_time: int) -> tuple[list[str], bool]: + """Remove expired metadata from the cache. + + Returns names of files corresponding to expired metadata and a boolean + flag indicating whether the writable cache is empty. Caller is + responsible for deleting the expired files. 
+ """ + expired_files = [] + for path, detail in self.cached_files[-1].copy().items(): + if time.time() - detail["time"] > expiry_time: + fn = detail.get("fn", "") + if not fn: + raise RuntimeError( + f"Cache metadata does not contain 'fn' for {path}" + ) + fn = os.path.join(self._storage[-1], fn) + expired_files.append(fn) + self.cached_files[-1].pop(path) + + if self.cached_files[-1]: + cache_path = os.path.join(self._storage[-1], "cache") + self._save(self.cached_files[-1], cache_path) + + writable_cache_empty = not self.cached_files[-1] + return expired_files, writable_cache_empty + + def load(self) -> None: + """Load all metadata from disk and store in ``self.cached_files``""" + cached_files = [] + for fn, _, _ in self._scan_locations(): + if os.path.exists(fn): + # TODO: consolidate blocks here + cached_files.append(self._load(fn)) + else: + cached_files.append({}) + self.cached_files = cached_files or [{}] + + def on_close_cached_file(self, f: Any, path: str) -> None: + """Perform side-effect actions on closing a cached file. + + The actual closing of the file is the responsibility of the caller. + """ + # File must be writeble, so in self.cached_files[-1] + c = self.cached_files[-1][path] + if c["blocks"] is not True and len(c["blocks"]) * f.blocksize >= f.size: + c["blocks"] = True + + def pop_file(self, path: str) -> str | None: + """Remove metadata of cached file. + + If path is in the cache, return the filename of the cached file, + otherwise return ``None``. Caller is responsible for deleting the + cached file. + """ + details = self.check_file(path, None) + if not details: + return None + _, fn = details + if fn.startswith(self._storage[-1]): + self.cached_files[-1].pop(path) + self.save() + else: + raise PermissionError( + "Can only delete cached file in last, writable cache location" + ) + return fn + + def save(self) -> None: + """Save metadata to disk""" + for (fn, _, writable), cache in zip(self._scan_locations(), self.cached_files): + if not writable: + continue + + if os.path.exists(fn): + cached_files = self._load(fn) + for k, c in cached_files.items(): + if k in cache: + if c["blocks"] is True or cache[k]["blocks"] is True: + c["blocks"] = True + else: + # self.cached_files[*][*]["blocks"] must continue to + # point to the same set object so that updates + # performed by MMapCache are propagated back to + # self.cached_files. 
+ blocks = cache[k]["blocks"] + blocks.update(c["blocks"]) + c["blocks"] = blocks + c["time"] = max(c["time"], cache[k]["time"]) + c["uid"] = cache[k]["uid"] + + # Files can be added to cache after it was written once + for k, c in cache.items(): + if k not in cached_files: + cached_files[k] = c + else: + cached_files = cache + cache = {k: v.copy() for k, v in cached_files.items()} + for c in cache.values(): + if isinstance(c["blocks"], set): + c["blocks"] = list(c["blocks"]) + self._save(cache, fn) + self.cached_files[-1] = cached_files + + def update_file(self, path: str, detail: Detail) -> None: + """Update metadata for specific file in memory, do not save""" + self.cached_files[-1][path] = detail diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cached.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cached.py new file mode 100644 index 0000000000000000000000000000000000000000..649bd712397ed3574fd3f14c2556c9c42b9edec1 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/cached.py @@ -0,0 +1,997 @@ +from __future__ import annotations + +import inspect +import logging +import os +import tempfile +import time +import weakref +from shutil import rmtree +from typing import TYPE_CHECKING, Any, Callable, ClassVar + +from fsspec import AbstractFileSystem, filesystem +from fsspec.callbacks import DEFAULT_CALLBACK +from fsspec.compression import compr +from fsspec.core import BaseCache, MMapCache +from fsspec.exceptions import BlocksizeMismatchError +from fsspec.implementations.cache_mapper import create_cache_mapper +from fsspec.implementations.cache_metadata import CacheMetadata +from fsspec.implementations.local import LocalFileSystem +from fsspec.spec import AbstractBufferedFile +from fsspec.transaction import Transaction +from fsspec.utils import infer_compression + +if TYPE_CHECKING: + from fsspec.implementations.cache_mapper import AbstractCacheMapper + +logger = logging.getLogger("fsspec.cached") + + +class WriteCachedTransaction(Transaction): + def complete(self, commit=True): + rpaths = [f.path for f in self.files] + lpaths = [f.fn for f in self.files] + if commit: + self.fs.put(lpaths, rpaths) + self.files.clear() + self.fs._intrans = False + self.fs._transaction = None + self.fs = None # break cycle + + +class CachingFileSystem(AbstractFileSystem): + """Locally caching filesystem, layer over any other FS + + This class implements chunk-wise local storage of remote files, for quick + access after the initial download. The files are stored in a given + directory with hashes of URLs for the filenames. If no directory is given, + a temporary one is used, which should be cleaned up by the OS after the + process ends. The files themselves are sparse (as implemented in + :class:`~fsspec.caching.MMapCache`), so only the data which is accessed + takes up space. 
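+ + A typical use looks like the following (a sketch; the target protocol, URL and cache directory are placeholders): + + .. code-block:: + + import fsspec + + fs = fsspec.filesystem( + "blockcache", target_protocol="https", cache_storage="/tmp/cache" + ) + with fs.open("https://example.com/data.bin") as f: + part = f.read(2**20) # only the blocks actually read are downloaded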
+ + Restrictions: + + - the block-size must be the same for each access of a given file, unless + all blocks of the file have already been read + - caching can only be applied to file-systems which produce files + derived from fsspec.spec.AbstractBufferedFile; LocalFileSystem is also + allowed, for testing + """ + + protocol: ClassVar[str | tuple[str, ...]] = ("blockcache", "cached") + + def __init__( + self, + target_protocol=None, + cache_storage="TMP", + cache_check=10, + check_files=False, + expiry_time=604800, + target_options=None, + fs=None, + same_names: bool | None = None, + compression=None, + cache_mapper: AbstractCacheMapper | None = None, + **kwargs, + ): + """ + + Parameters + ---------- + target_protocol: str (optional) + Target filesystem protocol. Provide either this or ``fs``. + cache_storage: str or list(str) + Location to store files. If "TMP", this is a temporary directory, + and will be cleaned up by the OS when this process ends (or later). + If a list, each location will be tried in the order given, but + only the last will be considered writable. + cache_check: int + Number of seconds between reload of cache metadata + check_files: bool + Whether to explicitly see if the UID of the remote file matches + the stored one before using. Warning: some file systems such as + HTTP cannot reliably give a unique hash of the contents of some + path, so be sure to set this option to False. + expiry_time: int + The time in seconds after which a local copy is considered useless. + Set to falsy to prevent expiry. The default is equivalent to one + week. + target_options: dict or None + Passed to the instantiation of the FS, if fs is None. + fs: filesystem instance + The target filesystem to run against. Provide this or ``target_protocol``. + same_names: bool (optional) + By default, target URLs are hashed using a ``HashCacheMapper`` so + that files from different backends with the same basename do not + conflict. If this argument is ``True``, a ``BasenameCacheMapper`` + is used instead. Other cache mapper options are available by using + the ``cache_mapper`` keyword argument. Only one of this and + ``cache_mapper`` should be specified. + compression: str (optional) + To decompress on download. Can be 'infer' (guess from the URL name), + one of the entries in ``fsspec.compression.compr``, or None for no + decompression. + cache_mapper: AbstractCacheMapper (optional) + The object used to map from original filenames to cached filenames. + Only one of this and ``same_names`` should be specified. + """ + super().__init__(**kwargs) + if fs is None and target_protocol is None: + raise ValueError( + "Please provide filesystem instance (fs) or target_protocol" + ) + if not (fs is None) ^ (target_protocol is None): + raise ValueError( + "The filesystem instance (fs) and target_protocol may not both be given." + ) + if cache_storage == "TMP": + tempdir = tempfile.mkdtemp() + storage = [tempdir] + weakref.finalize(self, self._remove_tempdir, tempdir) + else: + if isinstance(cache_storage, str): + storage = [cache_storage] + else: + storage = cache_storage + os.makedirs(storage[-1], exist_ok=True) + self.storage = storage + self.kwargs = target_options or {} + self.cache_check = cache_check + self.check_files = check_files + self.expiry = expiry_time + self.compression = compression + + # Size of cache in bytes. If None then the size is unknown and will be + # recalculated the next time cache_size() is called. On writes to the + # cache this is reset to None.
+ self._cache_size = None + + if same_names is not None and cache_mapper is not None: + raise ValueError( + "Cannot specify both same_names and cache_mapper in " + "CachingFileSystem.__init__" + ) + if cache_mapper is not None: + self._mapper = cache_mapper + else: + self._mapper = create_cache_mapper( + same_names if same_names is not None else False + ) + + self.target_protocol = ( + target_protocol + if isinstance(target_protocol, str) + else (fs.protocol if isinstance(fs.protocol, str) else fs.protocol[0]) + ) + self._metadata = CacheMetadata(self.storage) + self.load_cache() + self.fs = fs if fs is not None else filesystem(target_protocol, **self.kwargs) + + def _strip_protocol(path): + # acts as a method, since each instance has a different target + return self.fs._strip_protocol(type(self)._strip_protocol(path)) + + self._strip_protocol: Callable = _strip_protocol + + @staticmethod + def _remove_tempdir(tempdir): + try: + rmtree(tempdir) + except Exception: + pass + + def _mkcache(self): + os.makedirs(self.storage[-1], exist_ok=True) + + def cache_size(self): + """Return size of cache in bytes. + + If more than one cache directory is in use, only the size of the last + one (the writable cache directory) is returned. + """ + if self._cache_size is None: + cache_dir = self.storage[-1] + self._cache_size = filesystem("file").du(cache_dir, withdirs=True) + return self._cache_size + + def load_cache(self): + """Read set of stored blocks from file""" + self._metadata.load() + self._mkcache() + self.last_cache = time.time() + + def save_cache(self): + """Save set of stored blocks to file""" + self._mkcache() + self._metadata.save() + self.last_cache = time.time() + self._cache_size = None + + def _check_cache(self): + """Reload caches if time elapsed or any disappeared""" + self._mkcache() + if not self.cache_check: + # explicitly told not to bother checking + return + timecond = time.time() - self.last_cache > self.cache_check + existcond = all(os.path.exists(storage) for storage in self.storage) + if timecond or not existcond: + self.load_cache() + + def _check_file(self, path): + """Is path in cache and still valid""" + path = self._strip_protocol(path) + self._check_cache() + return self._metadata.check_file(path, self) + + def clear_cache(self): + """Remove all files and metadata from the cache + + In the case of multiple cache locations, this clears only the last one, + which is assumed to be the read/write one. + """ + rmtree(self.storage[-1]) + self.load_cache() + self._cache_size = None + + def clear_expired_cache(self, expiry_time=None): + """Remove all expired files and metadata from the cache + + In the case of multiple cache locations, this clears only the last one, + which is assumed to be the read/write one. + + Parameters + ---------- + expiry_time: int + The time in seconds after which a local copy is considered useless. + If not defined the default is equivalent to the attribute from the + file caching instantiation. + """ + + if not expiry_time: + expiry_time = self.expiry + + self._check_cache() + + expired_files, writable_cache_empty = self._metadata.clear_expired(expiry_time) + for fn in expired_files: + if os.path.exists(fn): + os.remove(fn) + + if writable_cache_empty: + rmtree(self.storage[-1]) + self.load_cache() + + self._cache_size = None + + def pop_from_cache(self, path): + """Remove cached version of given file + + Deletes local copy of the given (remote) path.
If it is found in a cache + location which is not the last, it is assumed to be read-only, and + raises PermissionError + """ + path = self._strip_protocol(path) + fn = self._metadata.pop_file(path) + if fn is not None: + os.remove(fn) + self._cache_size = None + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + """Wrap the target _open + + If the whole file exists in the cache, just open it locally and + return that. + + Otherwise, open the file on the target FS, and make it have a mmap + cache pointing to the location which we determine, in our cache. + The ``blocks`` instance is shared, so as the mmap cache instance + updates, so does the entry in our ``cached_files`` attribute. + We monkey-patch this file, so that when it closes, we call + ``close_and_update`` to save the state of the blocks. + """ + path = self._strip_protocol(path) + + path = self.fs._strip_protocol(path) + if "r" not in mode: + return self.fs._open( + path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_options=cache_options, + **kwargs, + ) + detail = self._check_file(path) + if detail: + # file is in cache + detail, fn = detail + hash, blocks = detail["fn"], detail["blocks"] + if blocks is True: + # stored file is complete + logger.debug("Opening local copy of %s", path) + return open(fn, mode) + # TODO: action where partial file exists in read-only cache + logger.debug("Opening partially cached copy of %s", path) + else: + hash = self._mapper(path) + fn = os.path.join(self.storage[-1], hash) + blocks = set() + detail = { + "original": path, + "fn": hash, + "blocks": blocks, + "time": time.time(), + "uid": self.fs.ukey(path), + } + self._metadata.update_file(path, detail) + logger.debug("Creating local sparse file for %s", path) + + # explicitly submitting the size to the open call will avoid extra + # operations when opening. This is particularly relevant + # for any file that is read over a network, e.g. S3. + size = detail.get("size") + + # call target filesystems open + self._mkcache() + f = self.fs._open( + path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_options=cache_options, + cache_type="none", + size=size, + **kwargs, + ) + + # set size if not already set + if size is None: + detail["size"] = f.size + self._metadata.update_file(path, detail) + + if self.compression: + comp = ( + infer_compression(path) + if self.compression == "infer" + else self.compression + ) + f = compr[comp](f, mode="rb") + if "blocksize" in detail: + if detail["blocksize"] != f.blocksize: + raise BlocksizeMismatchError( + f"Cached file must be reopened with same block" + f" size as original (old: {detail['blocksize']}," + f" new {f.blocksize})" + ) + else: + detail["blocksize"] = f.blocksize + + def _fetch_ranges(ranges): + return self.fs.cat_ranges( + [path] * len(ranges), + [r[0] for r in ranges], + [r[1] for r in ranges], + **kwargs, + ) + + multi_fetcher = None if self.compression else _fetch_ranges + f.cache = MMapCache( + f.blocksize, f._fetch_range, f.size, fn, blocks, multi_fetcher=multi_fetcher + ) + close = f.close + f.close = lambda: self.close_and_update(f, close) + self.save_cache() + return f + + def _parent(self, path): + return self.fs._parent(path) + + def hash_name(self, path: str, *args: Any) -> str: + # Kept for backward compatibility with downstream libraries. + # Ignores extra arguments, previously same_name boolean. 
+ return self._mapper(path) + + def close_and_update(self, f, close): + """Called when a file is closing, so store the set of blocks""" + if f.closed: + return + path = self._strip_protocol(f.path) + self._metadata.on_close_cached_file(f, path) + try: + logger.debug("going to save") + self.save_cache() + logger.debug("saved") + except OSError: + logger.debug("Cache saving failed while closing file") + except NameError: + logger.debug("Cache save failed due to interpreter shutdown") + close() + f.closed = True + + def ls(self, path, detail=True): + return self.fs.ls(path, detail) + + def __getattribute__(self, item): + if item in { + "load_cache", + "_open", + "save_cache", + "close_and_update", + "__init__", + "__getattribute__", + "__reduce__", + "_make_local_details", + "open", + "cat", + "cat_file", + "_cat_file", + "cat_ranges", + "_cat_ranges", + "get", + "read_block", + "tail", + "head", + "info", + "ls", + "exists", + "isfile", + "isdir", + "_check_file", + "_check_cache", + "_mkcache", + "clear_cache", + "clear_expired_cache", + "pop_from_cache", + "local_file", + "_paths_from_path", + "get_mapper", + "open_many", + "commit_many", + "hash_name", + "__hash__", + "__eq__", + "to_json", + "to_dict", + "cache_size", + "pipe_file", + "pipe", + "start_transaction", + "end_transaction", + }: + # all the methods defined in this class. Note `open` here, since + # it calls `_open`, but is actually in superclass + return lambda *args, **kw: getattr(type(self), item).__get__(self)( + *args, **kw + ) + if item in ["__reduce_ex__"]: + raise AttributeError + if item in ["transaction"]: + # property + return type(self).transaction.__get__(self) + if item in ["_cache", "transaction_type"]: + # class attributes + return getattr(type(self), item) + if item == "__class__": + return type(self) + d = object.__getattribute__(self, "__dict__") + fs = d.get("fs", None) # fs is not immediately defined + if item in d: + return d[item] + elif fs is not None: + if item in fs.__dict__: + # attribute of instance + return fs.__dict__[item] + # attribute belonging to the target filesystem + cls = type(fs) + m = getattr(cls, item) + if (inspect.isfunction(m) or inspect.isdatadescriptor(m)) and ( + not hasattr(m, "__self__") or m.__self__ is None + ): + # instance method + return m.__get__(fs, cls) + return m # class method or attribute + else: + # attributes of the superclass, while target is being set up + return super().__getattribute__(item) + + def __eq__(self, other): + """Test for equality.""" + if self is other: + return True + if not isinstance(other, type(self)): + return False + return ( + self.storage == other.storage + and self.kwargs == other.kwargs + and self.cache_check == other.cache_check + and self.check_files == other.check_files + and self.expiry == other.expiry + and self.compression == other.compression + and self._mapper == other._mapper + and self.target_protocol == other.target_protocol + ) + + def __hash__(self): + """Calculate hash.""" + return ( + hash(tuple(self.storage)) + ^ hash(str(self.kwargs)) + ^ hash(self.cache_check) + ^ hash(self.check_files) + ^ hash(self.expiry) + ^ hash(self.compression) + ^ hash(self._mapper) + ^ hash(self.target_protocol) + ) + + +class WholeFileCacheFileSystem(CachingFileSystem): + """Caches whole remote files on first access + + This class is intended as a layer over any other file system, and + will make a local copy of each file accessed, so that all subsequent + reads are local.
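+ + For example (a sketch; the protocol and paths are placeholders): + + .. code-block:: + + fs = fsspec.filesystem( + "filecache", target_protocol="s3", cache_storage="/tmp/files" + ) + with fs.open("s3://bucket/data.csv") as f: # the whole file is fetched once + data = f.read() +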
This is similar to ``CachingFileSystem``, but without + the block-wise functionality and so can work even when sparse files + are not allowed. See its docstring for definition of the init + arguments. + + The class still needs access to the remote store for listing files, + and may refresh cached files. + """ + + protocol = "filecache" + local_file = True + + def open_many(self, open_files, **kwargs): + paths = [of.path for of in open_files] + if "r" in open_files.mode: + self._mkcache() + else: + return [ + LocalTempFile( + self.fs, + path, + mode=open_files.mode, + fn=os.path.join(self.storage[-1], self._mapper(path)), + **kwargs, + ) + for path in paths + ] + + if self.compression: + raise NotImplementedError + details = [self._check_file(sp) for sp in paths] + downpath = [p for p, d in zip(paths, details) if not d] + downfn0 = [ + os.path.join(self.storage[-1], self._mapper(p)) + for p, d in zip(paths, details) + ] # keep these path names for opening later + downfn = [fn for fn, d in zip(downfn0, details) if not d] + if downpath: + # skip if all files are already cached and up to date + self.fs.get(downpath, downfn) + + # update metadata - only happens when downloads are successful + newdetail = [ + { + "original": path, + "fn": self._mapper(path), + "blocks": True, + "time": time.time(), + "uid": self.fs.ukey(path), + } + for path in downpath + ] + for path, detail in zip(downpath, newdetail): + self._metadata.update_file(path, detail) + self.save_cache() + + def firstpart(fn): + # helper to adapt both whole-file and simple-cache + return fn[1] if isinstance(fn, tuple) else fn + + return [ + open(firstpart(fn0) if fn0 else fn1, mode=open_files.mode) + for fn0, fn1 in zip(details, downfn0) + ] + + def commit_many(self, open_files): + self.fs.put([f.fn for f in open_files], [f.path for f in open_files]) + [f.close() for f in open_files] + for f in open_files: + # in case autocommit is off, and so close did not already delete + try: + os.remove(f.name) + except FileNotFoundError: + pass + self._cache_size = None + + def _make_local_details(self, path): + hash = self._mapper(path) + fn = os.path.join(self.storage[-1], hash) + detail = { + "original": path, + "fn": hash, + "blocks": True, + "time": time.time(), + "uid": self.fs.ukey(path), + } + self._metadata.update_file(path, detail) + logger.debug("Copying %s to local cache", path) + return fn + + def cat( + self, + path, + recursive=False, + on_error="raise", + callback=DEFAULT_CALLBACK, + **kwargs, + ): + paths = self.expand_path( + path, recursive=recursive, maxdepth=kwargs.get("maxdepth") + ) + getpaths = [] + storepaths = [] + fns = [] + out = {} + for p in paths.copy(): + try: + detail = self._check_file(p) + if not detail: + fn = self._make_local_details(p) + getpaths.append(p) + storepaths.append(fn) + else: + detail, fn = detail if isinstance(detail, tuple) else (None, detail) + fns.append(fn) + except Exception as e: + if on_error == "raise": + raise + if on_error == "return": + out[p] = e + paths.remove(p) + + if getpaths: + self.fs.get(getpaths, storepaths) + self.save_cache() + + callback.set_size(len(paths)) + for p, fn in zip(paths, fns): + with open(fn, "rb") as f: + out[p] = f.read() + callback.relative_update(1) + if isinstance(path, str) and len(paths) == 1 and recursive is False: + out = out[paths[0]] + return out + + def _open(self, path, mode="rb", **kwargs): + path = self._strip_protocol(path) + if "r" not in mode: + hash = self._mapper(path) + fn = os.path.join(self.storage[-1], hash) + user_specified_kwargs = { 
+ k: v + for k, v in kwargs.items() + # those kwargs were added by open(), we don't want them + if k not in ["autocommit", "block_size", "cache_options"] + } + return LocalTempFile(self, path, mode=mode, fn=fn, **user_specified_kwargs) + detail = self._check_file(path) + if detail: + detail, fn = detail + _, blocks = detail["fn"], detail["blocks"] + if blocks is True: + logger.debug("Opening local copy of %s", path) + + # In order to allow downstream filesystems, like the `TarFileSystem`, + # to infer the compression from the original filename, let's extend + # the `io.BufferedReader` fileobject protocol by adding a dedicated + # attribute `original`. + f = open(fn, mode) + f.original = detail.get("original") + return f + else: + raise ValueError( + f"Attempt to open partially cached file {path}" + f" as a wholly cached file" + ) + else: + fn = self._make_local_details(path) + kwargs["mode"] = mode + + # call target filesystem's open + self._mkcache() + if self.compression: + with self.fs._open(path, **kwargs) as f, open(fn, "wb") as f2: + if isinstance(f, AbstractBufferedFile): + # want no type of caching if just downloading whole thing + f.cache = BaseCache(0, f.cache.fetcher, f.size) + comp = ( + infer_compression(path) + if self.compression == "infer" + else self.compression + ) + f = compr[comp](f, mode="rb") + data = True + while data: + block = getattr(f, "blocksize", 5 * 2**20) + data = f.read(block) + f2.write(data) + else: + self.fs.get_file(path, fn) + self.save_cache() + return self._open(path, mode) + + +class SimpleCacheFileSystem(WholeFileCacheFileSystem): + """Caches whole remote files on first access + + This class is intended as a layer over any other file system, and + will make a local copy of each file accessed, so that all subsequent + reads are local. This implementation only copies whole files, and + does not keep any metadata about the download time or file details. + It is therefore safer to use in multi-threaded/concurrent situations. + + This is the only one of the caching filesystems that supports writing: you will + be given a real local open file, and upon close and commit, it will be + uploaded to the target filesystem; the writability of the target URL is + not checked until that time. + + """ + + protocol = "simplecache" + local_file = True + transaction_type = WriteCachedTransaction + + def __init__(self, **kwargs): + kw = kwargs.copy() + for key in ["cache_check", "expiry_time", "check_files"]: + kw[key] = False + super().__init__(**kw) + for storage in self.storage: + if not os.path.exists(storage): + os.makedirs(storage, exist_ok=True) + + def _check_file(self, path): + self._check_cache() + sha = self._mapper(path) + for storage in self.storage: + fn = os.path.join(storage, sha) + if os.path.exists(fn): + return fn + + def save_cache(self): + pass + + def load_cache(self): + pass + + def pipe_file(self, path, value=None, **kwargs): + if self._intrans: + with self.open(path, "wb") as f: + f.write(value) + else: + super().pipe_file(path, value) + + def ls(self, path, detail=True, **kwargs): + path = self._strip_protocol(path) + details = [] + try: + details = self.fs.ls( + path, detail=True, **kwargs + ).copy() # don't edit original!
+ except FileNotFoundError as e: + ex = e + else: + ex = None + if self._intrans: + path1 = path.rstrip("/") + "/" + for f in self.transaction.files: + if f.path == path: + details.append( + {"name": path, "size": f.size or f.tell(), "type": "file"} + ) + elif f.path.startswith(path1): + if f.path.count("/") == path1.count("/"): + details.append( + {"name": f.path, "size": f.size or f.tell(), "type": "file"} + ) + else: + dname = "/".join(f.path.split("/")[: path1.count("/") + 1]) + details.append({"name": dname, "size": 0, "type": "directory"}) + if ex is not None and not details: + raise ex + if detail: + return details + return sorted(_["name"] for _ in details) + + def info(self, path, **kwargs): + path = self._strip_protocol(path) + if self._intrans: + f = [_ for _ in self.transaction.files if _.path == path] + if f: + size = os.path.getsize(f[0].fn) if f[0].closed else f[0].tell() + return {"name": path, "size": size, "type": "file"} + f = any(_.path.startswith(path + "/") for _ in self.transaction.files) + if f: + return {"name": path, "size": 0, "type": "directory"} + return self.fs.info(path, **kwargs) + + def pipe(self, path, value=None, **kwargs): + if isinstance(path, str): + self.pipe_file(self._strip_protocol(path), value, **kwargs) + elif isinstance(path, dict): + for k, v in path.items(): + self.pipe_file(self._strip_protocol(k), v, **kwargs) + else: + raise ValueError("path must be str or dict") + + async def _cat_file(self, path, start=None, end=None, **kwargs): + logger.debug("async cat_file %s", path) + path = self._strip_protocol(path) + sha = self._mapper(path) + fn = self._check_file(path) + + if not fn: + fn = os.path.join(self.storage[-1], sha) + await self.fs._get_file(path, fn, **kwargs) + + with open(fn, "rb") as f: # noqa ASYNC230 + if start: + f.seek(start) + size = -1 if end is None else end - f.tell() + return f.read(size) + + async def _cat_ranges( + self, paths, starts, ends, max_gap=None, on_error="return", **kwargs + ): + logger.debug("async cat ranges %s", paths) + lpaths = [] + rset = set() + download = [] + rpaths = [] + for p in paths: + fn = self._check_file(p) + if fn is None and p not in rset: + sha = self._mapper(p) + fn = os.path.join(self.storage[-1], sha) + download.append(fn) + rset.add(p) + rpaths.append(p) + lpaths.append(fn) + if download: + await self.fs._get(rpaths, download, on_error=on_error) + + return LocalFileSystem().cat_ranges( + lpaths, starts, ends, max_gap=max_gap, on_error=on_error, **kwargs + ) + + def cat_ranges( + self, paths, starts, ends, max_gap=None, on_error="return", **kwargs + ): + logger.debug("cat ranges %s", paths) + lpaths = [self._check_file(p) for p in paths] + rpaths = [p for l, p in zip(lpaths, paths) if l is False] + lpaths = [l for l, p in zip(lpaths, paths) if l is False] + self.fs.get(rpaths, lpaths) + return LocalFileSystem().cat_ranges( + paths, starts, ends, max_gap=max_gap, on_error=on_error, **kwargs + ) + + def _open(self, path, mode="rb", **kwargs): + path = self._strip_protocol(path) + sha = self._mapper(path) + + if "r" not in mode: + fn = os.path.join(self.storage[-1], sha) + user_specified_kwargs = { + k: v + for k, v in kwargs.items() + if k not in ["autocommit", "block_size", "cache_options"] + } # those were added by open() + return LocalTempFile( + self, + path, + mode=mode, + autocommit=not self._intrans, + fn=fn, + **user_specified_kwargs, + ) + fn = self._check_file(path) + if fn: + return open(fn, mode) + + fn = os.path.join(self.storage[-1], sha) + logger.debug("Copying %s to local 
cache", path) + kwargs["mode"] = mode + + self._mkcache() + self._cache_size = None + if self.compression: + with self.fs._open(path, **kwargs) as f, open(fn, "wb") as f2: + if isinstance(f, AbstractBufferedFile): + # want no type of caching if just downloading whole thing + f.cache = BaseCache(0, f.cache.fetcher, f.size) + comp = ( + infer_compression(path) + if self.compression == "infer" + else self.compression + ) + f = compr[comp](f, mode="rb") + data = True + while data: + block = getattr(f, "blocksize", 5 * 2**20) + data = f.read(block) + f2.write(data) + else: + self.fs.get_file(path, fn) + return self._open(path, mode) + + +class LocalTempFile: + """A temporary local file, which will be uploaded on commit""" + + def __init__(self, fs, path, fn, mode="wb", autocommit=True, seek=0, **kwargs): + self.fn = fn + self.fh = open(fn, mode) + self.mode = mode + if seek: + self.fh.seek(seek) + self.path = path + self.size = None + self.fs = fs + self.closed = False + self.autocommit = autocommit + self.kwargs = kwargs + + def __reduce__(self): + # always open in r+b to allow continuing writing at a location + return ( + LocalTempFile, + (self.fs, self.path, self.fn, "r+b", self.autocommit, self.tell()), + ) + + def __enter__(self): + return self.fh + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + # self.size = self.fh.tell() + if self.closed: + return + self.fh.close() + self.closed = True + if self.autocommit: + self.commit() + + def discard(self): + self.fh.close() + os.remove(self.fn) + + def commit(self): + self.fs.put(self.fn, self.path, **self.kwargs) + # we do not delete the local copy, it's still in the cache. + + @property + def name(self): + return self.fn + + def __repr__(self) -> str: + return f"LocalTempFile: {self.path}" + + def __getattr__(self, item): + return getattr(self.fh, item) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dask.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dask.py new file mode 100644 index 0000000000000000000000000000000000000000..3e1276463db6866665e6a0fe114efc247971b57e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dask.py @@ -0,0 +1,152 @@ +import dask +from distributed.client import Client, _get_global_client +from distributed.worker import Worker + +from fsspec import filesystem +from fsspec.spec import AbstractBufferedFile, AbstractFileSystem +from fsspec.utils import infer_storage_options + + +def _get_client(client): + if client is None: + return _get_global_client() + elif isinstance(client, Client): + return client + else: + # e.g., connection string + return Client(client) + + +def _in_worker(): + return bool(Worker._instances) + + +class DaskWorkerFileSystem(AbstractFileSystem): + """View files accessible to a worker as any other remote file-system + + When instances are run on the worker, uses the real filesystem. When + run on the client, they call the worker to provide information or data. + + **Warning** this implementation is experimental, and read-only for now. 
+ """ + + def __init__( + self, target_protocol=None, target_options=None, fs=None, client=None, **kwargs + ): + super().__init__(**kwargs) + if not (fs is None) ^ (target_protocol is None): + raise ValueError( + "Please provide one of filesystem instance (fs) or" + " target_protocol, not both" + ) + self.target_protocol = target_protocol + self.target_options = target_options + self.worker = None + self.client = client + self.fs = fs + self._determine_worker() + + @staticmethod + def _get_kwargs_from_urls(path): + so = infer_storage_options(path) + if "host" in so and "port" in so: + return {"client": f"{so['host']}:{so['port']}"} + else: + return {} + + def _determine_worker(self): + if _in_worker(): + self.worker = True + if self.fs is None: + self.fs = filesystem( + self.target_protocol, **(self.target_options or {}) + ) + else: + self.worker = False + self.client = _get_client(self.client) + self.rfs = dask.delayed(self) + + def mkdir(self, *args, **kwargs): + if self.worker: + self.fs.mkdir(*args, **kwargs) + else: + self.rfs.mkdir(*args, **kwargs).compute() + + def rm(self, *args, **kwargs): + if self.worker: + self.fs.rm(*args, **kwargs) + else: + self.rfs.rm(*args, **kwargs).compute() + + def copy(self, *args, **kwargs): + if self.worker: + self.fs.copy(*args, **kwargs) + else: + self.rfs.copy(*args, **kwargs).compute() + + def mv(self, *args, **kwargs): + if self.worker: + self.fs.mv(*args, **kwargs) + else: + self.rfs.mv(*args, **kwargs).compute() + + def ls(self, *args, **kwargs): + if self.worker: + return self.fs.ls(*args, **kwargs) + else: + return self.rfs.ls(*args, **kwargs).compute() + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + if self.worker: + return self.fs._open( + path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_options=cache_options, + **kwargs, + ) + else: + return DaskFile( + fs=self, + path=path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_options=cache_options, + **kwargs, + ) + + def fetch_range(self, path, mode, start, end): + if self.worker: + with self._open(path, mode) as f: + f.seek(start) + return f.read(end - start) + else: + return self.rfs.fetch_range(path, mode, start, end).compute() + + +class DaskFile(AbstractBufferedFile): + def __init__(self, mode="rb", **kwargs): + if mode != "rb": + raise ValueError('Remote dask files can only be opened in "rb" mode') + super().__init__(**kwargs) + + def _upload_chunk(self, final=False): + pass + + def _initiate_upload(self): + """Create remote file/upload""" + pass + + def _fetch_range(self, start, end): + """Get the specified set of bytes from remote""" + return self.fs.fetch_range(self.path, self.mode, start, end) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/data.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/data.py new file mode 100644 index 0000000000000000000000000000000000000000..519032305bed633f2ba8a6148076433caf81710b --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/data.py @@ -0,0 +1,58 @@ +import base64 +import io +from typing import Optional +from urllib.parse import unquote + +from fsspec import AbstractFileSystem + + +class DataFileSystem(AbstractFileSystem): + """A handy decoder for data-URLs + + Example + ------- + >>> with fsspec.open("data:,Hello%2C%20World%21") as 
f: + ... print(f.read()) + b"Hello, World!" + + See https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URLs + """ + + protocol = "data" + + def __init__(self, **kwargs): + """No parameters for this filesystem""" + super().__init__(**kwargs) + + def cat_file(self, path, start=None, end=None, **kwargs): + pref, data = path.split(",", 1) + if pref.endswith("base64"): + return base64.b64decode(data)[start:end] + return unquote(data).encode()[start:end] + + def info(self, path, **kwargs): + pref, name = path.split(",", 1) + data = self.cat_file(path) + mime = pref.split(":", 1)[1].split(";", 1)[0] + return {"name": name, "size": len(data), "type": "file", "mimetype": mime} + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + if "r" not in mode: + raise ValueError("Read only filesystem") + return io.BytesIO(self.cat_file(path)) + + @staticmethod + def encode(data: bytes, mime: Optional[str] = None): + """Format the given data into data-URL syntax + + This version always base64 encodes, even when the data is ascii/url-safe. + """ + return f"data:{mime or ''};base64,{base64.b64encode(data).decode()}" diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dbfs.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dbfs.py new file mode 100644 index 0000000000000000000000000000000000000000..2c56a426e0a79ab2d5811d7531c4f8eba5b9315f --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dbfs.py @@ -0,0 +1,468 @@ +import base64 +import urllib + +import requests +import requests.exceptions +from requests.adapters import HTTPAdapter, Retry + +from fsspec import AbstractFileSystem +from fsspec.spec import AbstractBufferedFile + + +class DatabricksException(Exception): + """ + Helper class for exceptions raised in this module. + """ + + def __init__(self, error_code, message, details=None): + """Create a new DatabricksException""" + super().__init__(message) + + self.error_code = error_code + self.message = message + self.details = details + + +class DatabricksFileSystem(AbstractFileSystem): + """ + Get access to the Databricks filesystem implementation over HTTP. + Can be used inside and outside of a databricks cluster. + """ + + def __init__(self, instance, token, **kwargs): + """ + Create a new DatabricksFileSystem. + + Parameters + ---------- + instance: str + The instance URL of the databricks cluster. + For example for an Azure databricks cluster, this + has the form adb-..azuredatabricks.net. + token: str + Your personal token. Find out more + here: https://docs.databricks.com/dev-tools/api/latest/authentication.html + """ + self.instance = instance + self.token = token + self.session = requests.Session() + self.retries = Retry( + total=10, + backoff_factor=0.05, + status_forcelist=[408, 429, 500, 502, 503, 504], + ) + + self.session.mount("https://", HTTPAdapter(max_retries=self.retries)) + self.session.headers.update({"Authorization": f"Bearer {self.token}"}) + + super().__init__(**kwargs) + + def ls(self, path, detail=True, **kwargs): + """ + List the contents of the given path. + + Parameters + ---------- + path: str + Absolute path + detail: bool + Return not only the list of filenames, + but also additional information on file sizes + and types. 
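+ + For example (a sketch; the path is a placeholder): + + .. code-block:: + + names = fs.ls("/mnt/data", detail=False) # list of absolute path strings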
+ """ + out = self._ls_from_cache(path) + if not out: + try: + r = self._send_to_api( + method="get", endpoint="list", json={"path": path} + ) + except DatabricksException as e: + if e.error_code == "RESOURCE_DOES_NOT_EXIST": + raise FileNotFoundError(e.message) from e + + raise + files = r.get("files", []) + out = [ + { + "name": o["path"], + "type": "directory" if o["is_dir"] else "file", + "size": o["file_size"], + } + for o in files + ] + self.dircache[path] = out + + if detail: + return out + return [o["name"] for o in out] + + def makedirs(self, path, exist_ok=True): + """ + Create a given absolute path and all of its parents. + + Parameters + ---------- + path: str + Absolute path to create + exist_ok: bool + If false, checks if the folder + exists before creating it (and raises an + Exception if this is the case) + """ + if not exist_ok: + try: + # If the following succeeds, the path is already present + self._send_to_api( + method="get", endpoint="get-status", json={"path": path} + ) + raise FileExistsError(f"Path {path} already exists") + except DatabricksException as e: + if e.error_code == "RESOURCE_DOES_NOT_EXIST": + pass + + try: + self._send_to_api(method="post", endpoint="mkdirs", json={"path": path}) + except DatabricksException as e: + if e.error_code == "RESOURCE_ALREADY_EXISTS": + raise FileExistsError(e.message) from e + + raise + self.invalidate_cache(self._parent(path)) + + def mkdir(self, path, create_parents=True, **kwargs): + """ + Create a given absolute path and all of its parents. + + Parameters + ---------- + path: str + Absolute path to create + create_parents: bool + Whether to create all parents or not. + "False" is not implemented so far. + """ + if not create_parents: + raise NotImplementedError + + self.mkdirs(path, **kwargs) + + def rm(self, path, recursive=False, **kwargs): + """ + Remove the file or folder at the given absolute path. + + Parameters + ---------- + path: str + Absolute path what to remove + recursive: bool + Recursively delete all files in a folder. + """ + try: + self._send_to_api( + method="post", + endpoint="delete", + json={"path": path, "recursive": recursive}, + ) + except DatabricksException as e: + # This is not really an exception, it just means + # not everything was deleted so far + if e.error_code == "PARTIAL_DELETE": + self.rm(path=path, recursive=recursive) + elif e.error_code == "IO_ERROR": + # Using the same exception as the os module would use here + raise OSError(e.message) from e + + raise + self.invalidate_cache(self._parent(path)) + + def mv( + self, source_path, destination_path, recursive=False, maxdepth=None, **kwargs + ): + """ + Move a source to a destination path. + + A note from the original [databricks API manual] + (https://docs.databricks.com/dev-tools/api/latest/dbfs.html#move). + + When moving a large number of files the API call will time out after + approximately 60s, potentially resulting in partially moved data. + Therefore, for operations that move more than 10k files, we strongly + discourage using the DBFS REST API. + + Parameters + ---------- + source_path: str + From where to move (absolute path) + destination_path: str + To where to move (absolute path) + recursive: bool + Not implemented to far. + maxdepth: + Not implemented to far. 
+ """ + if recursive: + raise NotImplementedError + if maxdepth: + raise NotImplementedError + + try: + self._send_to_api( + method="post", + endpoint="move", + json={"source_path": source_path, "destination_path": destination_path}, + ) + except DatabricksException as e: + if e.error_code == "RESOURCE_DOES_NOT_EXIST": + raise FileNotFoundError(e.message) from e + elif e.error_code == "RESOURCE_ALREADY_EXISTS": + raise FileExistsError(e.message) from e + + raise + self.invalidate_cache(self._parent(source_path)) + self.invalidate_cache(self._parent(destination_path)) + + def _open(self, path, mode="rb", block_size="default", **kwargs): + """ + Overwrite the base class method to make sure to create a DBFile. + All arguments are copied from the base method. + + Only the default blocksize is allowed. + """ + return DatabricksFile(self, path, mode=mode, block_size=block_size, **kwargs) + + def _send_to_api(self, method, endpoint, json): + """ + Send the given json to the DBFS API + using a get or post request (specified by the argument `method`). + + Parameters + ---------- + method: str + Which http method to use for communication; "get" or "post". + endpoint: str + Where to send the request to (last part of the API URL) + json: dict + Dictionary of information to send + """ + if method == "post": + session_call = self.session.post + elif method == "get": + session_call = self.session.get + else: + raise ValueError(f"Do not understand method {method}") + + url = urllib.parse.urljoin(f"https://{self.instance}/api/2.0/dbfs/", endpoint) + + r = session_call(url, json=json) + + # The DBFS API will return a json, also in case of an exception. + # We want to preserve this information as good as possible. + try: + r.raise_for_status() + except requests.HTTPError as e: + # try to extract json error message + # if that fails, fall back to the original exception + try: + exception_json = e.response.json() + except Exception: + raise e from None + + raise DatabricksException(**exception_json) from e + + return r.json() + + def _create_handle(self, path, overwrite=True): + """ + Internal function to create a handle, which can be used to + write blocks of a file to DBFS. + A handle has a unique identifier which needs to be passed + whenever written during this transaction. + The handle is active for 10 minutes - after that a new + write transaction needs to be created. + Make sure to close the handle after you are finished. + + Parameters + ---------- + path: str + Absolute path for this file. + overwrite: bool + If a file already exist at this location, either overwrite + it or raise an exception. + """ + try: + r = self._send_to_api( + method="post", + endpoint="create", + json={"path": path, "overwrite": overwrite}, + ) + return r["handle"] + except DatabricksException as e: + if e.error_code == "RESOURCE_ALREADY_EXISTS": + raise FileExistsError(e.message) from e + + raise + + def _close_handle(self, handle): + """ + Close a handle, which was opened by :func:`_create_handle`. + + Parameters + ---------- + handle: str + Which handle to close. + """ + try: + self._send_to_api(method="post", endpoint="close", json={"handle": handle}) + except DatabricksException as e: + if e.error_code == "RESOURCE_DOES_NOT_EXIST": + raise FileNotFoundError(e.message) from e + + raise + + def _add_data(self, handle, data): + """ + Upload data to an already opened file handle + (opened by :func:`_create_handle`). + The maximal allowed data size is 1MB after + conversion to base64. 
+ Remember to close the handle when you are finished. + + Parameters + ---------- + handle: str + Which handle to upload data to. + data: bytes + Block of data to add to the handle. + """ + data = base64.b64encode(data).decode() + try: + self._send_to_api( + method="post", + endpoint="add-block", + json={"handle": handle, "data": data}, + ) + except DatabricksException as e: + if e.error_code == "RESOURCE_DOES_NOT_EXIST": + raise FileNotFoundError(e.message) from e + elif e.error_code == "MAX_BLOCK_SIZE_EXCEEDED": + raise ValueError(e.message) from e + + raise + + def _get_data(self, path, start, end): + """ + Download data in bytes from a given absolute path in a block + from [start, start+length]. + The maximum number of allowed bytes to read is 1MB. + + Parameters + ---------- + path: str + Absolute path to download data from + start: int + Start position of the block + end: int + End position of the block + """ + try: + r = self._send_to_api( + method="get", + endpoint="read", + json={"path": path, "offset": start, "length": end - start}, + ) + return base64.b64decode(r["data"]) + except DatabricksException as e: + if e.error_code == "RESOURCE_DOES_NOT_EXIST": + raise FileNotFoundError(e.message) from e + elif e.error_code in ["INVALID_PARAMETER_VALUE", "MAX_READ_SIZE_EXCEEDED"]: + raise ValueError(e.message) from e + + raise + + def invalidate_cache(self, path=None): + if path is None: + self.dircache.clear() + else: + self.dircache.pop(path, None) + super().invalidate_cache(path) + + +class DatabricksFile(AbstractBufferedFile): + """ + Helper class for files referenced in the DatabricksFileSystem. + """ + + DEFAULT_BLOCK_SIZE = 1 * 2**20 # only allowed block size + + def __init__( + self, + fs, + path, + mode="rb", + block_size="default", + autocommit=True, + cache_type="readahead", + cache_options=None, + **kwargs, + ): + """ + Create a new instance of the DatabricksFile. + + The blocksize needs to be the default one. 
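+
+        Instances are normally obtained via the filesystem's ``open`` rather
+        than constructed directly; a sketch (path is a placeholder):
+
+        >>> with dbfs.open("/output/part-0.bin", "wb") as f:  # doctest: +SKIP
+        ...     f.write(b"0" * 2**21)  # buffered and uploaded in 1MB blocks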
+ """ + if block_size is None or block_size == "default": + block_size = self.DEFAULT_BLOCK_SIZE + + assert block_size == self.DEFAULT_BLOCK_SIZE, ( + f"Only the default block size is allowed, not {block_size}" + ) + + super().__init__( + fs, + path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_type=cache_type, + cache_options=cache_options or {}, + **kwargs, + ) + + def _initiate_upload(self): + """Internal function to start a file upload""" + self.handle = self.fs._create_handle(self.path) + + def _upload_chunk(self, final=False): + """Internal function to add a chunk of data to a started upload""" + self.buffer.seek(0) + data = self.buffer.getvalue() + + data_chunks = [ + data[start:end] for start, end in self._to_sized_blocks(len(data)) + ] + + for data_chunk in data_chunks: + self.fs._add_data(handle=self.handle, data=data_chunk) + + if final: + self.fs._close_handle(handle=self.handle) + return True + + def _fetch_range(self, start, end): + """Internal function to download a block of data""" + return_buffer = b"" + length = end - start + for chunk_start, chunk_end in self._to_sized_blocks(length, start): + return_buffer += self.fs._get_data( + path=self.path, start=chunk_start, end=chunk_end + ) + + return return_buffer + + def _to_sized_blocks(self, length, start=0): + """Helper function to split a range from 0 to total_length into bloksizes""" + end = start + length + for data_chunk in range(start, end, self.blocksize): + data_start = data_chunk + data_end = min(end, data_chunk + self.blocksize) + yield data_start, data_end diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dirfs.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dirfs.py new file mode 100644 index 0000000000000000000000000000000000000000..c0623b82fc61ed4780fdbc1d69680f2b96f2b9f3 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/dirfs.py @@ -0,0 +1,388 @@ +from .. import filesystem +from ..asyn import AsyncFileSystem + + +class DirFileSystem(AsyncFileSystem): + """Directory prefix filesystem + + The DirFileSystem is a filesystem-wrapper. It assumes every path it is dealing with + is relative to the `path`. After performing the necessary paths operation it + delegates everything to the wrapped filesystem. + """ + + protocol = "dir" + + def __init__( + self, + path=None, + fs=None, + fo=None, + target_protocol=None, + target_options=None, + **storage_options, + ): + """ + Parameters + ---------- + path: str + Path to the directory. + fs: AbstractFileSystem + An instantiated filesystem to wrap. 
+ target_protocol, target_options: + if fs is none, construct it from these + fo: str + Alternate for path; do not provide both + """ + super().__init__(**storage_options) + if fs is None: + fs = filesystem(protocol=target_protocol, **(target_options or {})) + path = path or fo + + if self.asynchronous and not fs.async_impl: + raise ValueError("can't use asynchronous with non-async fs") + + if fs.async_impl and self.asynchronous != fs.asynchronous: + raise ValueError("both dirfs and fs should be in the same sync/async mode") + + self.path = fs._strip_protocol(path) + self.fs = fs + + def _join(self, path): + if isinstance(path, str): + if not self.path: + return path + if not path: + return self.path + return self.fs.sep.join((self.path, self._strip_protocol(path))) + if isinstance(path, dict): + return {self._join(_path): value for _path, value in path.items()} + return [self._join(_path) for _path in path] + + def _relpath(self, path): + if isinstance(path, str): + if not self.path: + return path + # We need to account for S3FileSystem returning paths that do not + # start with a '/' + if path == self.path or ( + self.path.startswith(self.fs.sep) and path == self.path[1:] + ): + return "" + prefix = self.path + self.fs.sep + if self.path.startswith(self.fs.sep) and not path.startswith(self.fs.sep): + prefix = prefix[1:] + assert path.startswith(prefix) + return path[len(prefix) :] + return [self._relpath(_path) for _path in path] + + # Wrappers below + + @property + def sep(self): + return self.fs.sep + + async def set_session(self, *args, **kwargs): + return await self.fs.set_session(*args, **kwargs) + + async def _rm_file(self, path, **kwargs): + return await self.fs._rm_file(self._join(path), **kwargs) + + def rm_file(self, path, **kwargs): + return self.fs.rm_file(self._join(path), **kwargs) + + async def _rm(self, path, *args, **kwargs): + return await self.fs._rm(self._join(path), *args, **kwargs) + + def rm(self, path, *args, **kwargs): + return self.fs.rm(self._join(path), *args, **kwargs) + + async def _cp_file(self, path1, path2, **kwargs): + return await self.fs._cp_file(self._join(path1), self._join(path2), **kwargs) + + def cp_file(self, path1, path2, **kwargs): + return self.fs.cp_file(self._join(path1), self._join(path2), **kwargs) + + async def _copy( + self, + path1, + path2, + *args, + **kwargs, + ): + return await self.fs._copy( + self._join(path1), + self._join(path2), + *args, + **kwargs, + ) + + def copy(self, path1, path2, *args, **kwargs): + return self.fs.copy( + self._join(path1), + self._join(path2), + *args, + **kwargs, + ) + + async def _pipe(self, path, *args, **kwargs): + return await self.fs._pipe(self._join(path), *args, **kwargs) + + def pipe(self, path, *args, **kwargs): + return self.fs.pipe(self._join(path), *args, **kwargs) + + async def _pipe_file(self, path, *args, **kwargs): + return await self.fs._pipe_file(self._join(path), *args, **kwargs) + + def pipe_file(self, path, *args, **kwargs): + return self.fs.pipe_file(self._join(path), *args, **kwargs) + + async def _cat_file(self, path, *args, **kwargs): + return await self.fs._cat_file(self._join(path), *args, **kwargs) + + def cat_file(self, path, *args, **kwargs): + return self.fs.cat_file(self._join(path), *args, **kwargs) + + async def _cat(self, path, *args, **kwargs): + ret = await self.fs._cat( + self._join(path), + *args, + **kwargs, + ) + + if isinstance(ret, dict): + return {self._relpath(key): value for key, value in ret.items()} + + return ret + + def cat(self, path, *args, **kwargs): 
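+        """Fetch file contents through the wrapped filesystem.
+
+        A sketch of the path translation (directory and file names are
+        made up):
+
+        >>> dirfs = DirFileSystem(path="/base", fs=filesystem("file"))
+        >>> dirfs.cat("a.txt")  # reads /base/a.txt  # doctest: +SKIP
+        b'...'
+        """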
+ ret = self.fs.cat( + self._join(path), + *args, + **kwargs, + ) + + if isinstance(ret, dict): + return {self._relpath(key): value for key, value in ret.items()} + + return ret + + async def _put_file(self, lpath, rpath, **kwargs): + return await self.fs._put_file(lpath, self._join(rpath), **kwargs) + + def put_file(self, lpath, rpath, **kwargs): + return self.fs.put_file(lpath, self._join(rpath), **kwargs) + + async def _put( + self, + lpath, + rpath, + *args, + **kwargs, + ): + return await self.fs._put( + lpath, + self._join(rpath), + *args, + **kwargs, + ) + + def put(self, lpath, rpath, *args, **kwargs): + return self.fs.put( + lpath, + self._join(rpath), + *args, + **kwargs, + ) + + async def _get_file(self, rpath, lpath, **kwargs): + return await self.fs._get_file(self._join(rpath), lpath, **kwargs) + + def get_file(self, rpath, lpath, **kwargs): + return self.fs.get_file(self._join(rpath), lpath, **kwargs) + + async def _get(self, rpath, *args, **kwargs): + return await self.fs._get(self._join(rpath), *args, **kwargs) + + def get(self, rpath, *args, **kwargs): + return self.fs.get(self._join(rpath), *args, **kwargs) + + async def _isfile(self, path): + return await self.fs._isfile(self._join(path)) + + def isfile(self, path): + return self.fs.isfile(self._join(path)) + + async def _isdir(self, path): + return await self.fs._isdir(self._join(path)) + + def isdir(self, path): + return self.fs.isdir(self._join(path)) + + async def _size(self, path): + return await self.fs._size(self._join(path)) + + def size(self, path): + return self.fs.size(self._join(path)) + + async def _exists(self, path): + return await self.fs._exists(self._join(path)) + + def exists(self, path): + return self.fs.exists(self._join(path)) + + async def _info(self, path, **kwargs): + info = await self.fs._info(self._join(path), **kwargs) + info = info.copy() + info["name"] = self._relpath(info["name"]) + return info + + def info(self, path, **kwargs): + info = self.fs.info(self._join(path), **kwargs) + info = info.copy() + info["name"] = self._relpath(info["name"]) + return info + + async def _ls(self, path, detail=True, **kwargs): + ret = (await self.fs._ls(self._join(path), detail=detail, **kwargs)).copy() + if detail: + out = [] + for entry in ret: + entry = entry.copy() + entry["name"] = self._relpath(entry["name"]) + out.append(entry) + return out + + return self._relpath(ret) + + def ls(self, path, detail=True, **kwargs): + ret = self.fs.ls(self._join(path), detail=detail, **kwargs).copy() + if detail: + out = [] + for entry in ret: + entry = entry.copy() + entry["name"] = self._relpath(entry["name"]) + out.append(entry) + return out + + return self._relpath(ret) + + async def _walk(self, path, *args, **kwargs): + async for root, dirs, files in self.fs._walk(self._join(path), *args, **kwargs): + yield self._relpath(root), dirs, files + + def walk(self, path, *args, **kwargs): + for root, dirs, files in self.fs.walk(self._join(path), *args, **kwargs): + yield self._relpath(root), dirs, files + + async def _glob(self, path, **kwargs): + detail = kwargs.get("detail", False) + ret = await self.fs._glob(self._join(path), **kwargs) + if detail: + return {self._relpath(path): info for path, info in ret.items()} + return self._relpath(ret) + + def glob(self, path, **kwargs): + detail = kwargs.get("detail", False) + ret = self.fs.glob(self._join(path), **kwargs) + if detail: + return {self._relpath(path): info for path, info in ret.items()} + return self._relpath(ret) + + async def _du(self, path, *args, **kwargs): 
+ total = kwargs.get("total", True) + ret = await self.fs._du(self._join(path), *args, **kwargs) + if total: + return ret + + return {self._relpath(path): size for path, size in ret.items()} + + def du(self, path, *args, **kwargs): + total = kwargs.get("total", True) + ret = self.fs.du(self._join(path), *args, **kwargs) + if total: + return ret + + return {self._relpath(path): size for path, size in ret.items()} + + async def _find(self, path, *args, **kwargs): + detail = kwargs.get("detail", False) + ret = await self.fs._find(self._join(path), *args, **kwargs) + if detail: + return {self._relpath(path): info for path, info in ret.items()} + return self._relpath(ret) + + def find(self, path, *args, **kwargs): + detail = kwargs.get("detail", False) + ret = self.fs.find(self._join(path), *args, **kwargs) + if detail: + return {self._relpath(path): info for path, info in ret.items()} + return self._relpath(ret) + + async def _expand_path(self, path, *args, **kwargs): + return self._relpath( + await self.fs._expand_path(self._join(path), *args, **kwargs) + ) + + def expand_path(self, path, *args, **kwargs): + return self._relpath(self.fs.expand_path(self._join(path), *args, **kwargs)) + + async def _mkdir(self, path, *args, **kwargs): + return await self.fs._mkdir(self._join(path), *args, **kwargs) + + def mkdir(self, path, *args, **kwargs): + return self.fs.mkdir(self._join(path), *args, **kwargs) + + async def _makedirs(self, path, *args, **kwargs): + return await self.fs._makedirs(self._join(path), *args, **kwargs) + + def makedirs(self, path, *args, **kwargs): + return self.fs.makedirs(self._join(path), *args, **kwargs) + + def rmdir(self, path): + return self.fs.rmdir(self._join(path)) + + def mv(self, path1, path2, **kwargs): + return self.fs.mv( + self._join(path1), + self._join(path2), + **kwargs, + ) + + def touch(self, path, **kwargs): + return self.fs.touch(self._join(path), **kwargs) + + def created(self, path): + return self.fs.created(self._join(path)) + + def modified(self, path): + return self.fs.modified(self._join(path)) + + def sign(self, path, *args, **kwargs): + return self.fs.sign(self._join(path), *args, **kwargs) + + def __repr__(self): + return f"{self.__class__.__qualname__}(path='{self.path}', fs={self.fs})" + + def open( + self, + path, + *args, + **kwargs, + ): + return self.fs.open( + self._join(path), + *args, + **kwargs, + ) + + async def open_async( + self, + path, + *args, + **kwargs, + ): + return await self.fs.open_async( + self._join(path), + *args, + **kwargs, + ) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/ftp.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/ftp.py new file mode 100644 index 0000000000000000000000000000000000000000..a3db22b04a00ddf12582253ec19b2938c794c1da --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/ftp.py @@ -0,0 +1,387 @@ +import os +import uuid +from ftplib import FTP, FTP_TLS, Error, error_perm +from typing import Any + +from ..spec import AbstractBufferedFile, AbstractFileSystem +from ..utils import infer_storage_options, isfilelike + + +class FTPFileSystem(AbstractFileSystem): + """A filesystem over classic FTP""" + + root_marker = "/" + cachable = False + protocol = "ftp" + + def __init__( + self, + host, + port=21, + username=None, + password=None, + acct=None, + block_size=None, + tempdir=None, + timeout=30, + encoding="utf-8", + 
tls=False, + **kwargs, + ): + """ + You can use _get_kwargs_from_urls to get some kwargs from + a reasonable FTP url. + + Authentication will be anonymous if username/password are not + given. + + Parameters + ---------- + host: str + The remote server name/ip to connect to + port: int + Port to connect with + username: str or None + If authenticating, the user's identifier + password: str or None + User's password on the server, if using + acct: str or None + Some servers also need an "account" string for auth + block_size: int or None + If given, the read-ahead or write buffer size. + tempdir: str + Directory on remote to put temporary files when in a transaction + timeout: int + Timeout of the ftp connection in seconds + encoding: str + Encoding to use for directories and filenames in FTP connection + tls: bool + Use FTP-TLS, by default False + """ + super().__init__(**kwargs) + self.host = host + self.port = port + self.tempdir = tempdir or "/tmp" + self.cred = username or "", password or "", acct or "" + self.timeout = timeout + self.encoding = encoding + if block_size is not None: + self.blocksize = block_size + else: + self.blocksize = 2**16 + self.tls = tls + self._connect() + if self.tls: + self.ftp.prot_p() + + def _connect(self): + if self.tls: + ftp_cls = FTP_TLS + else: + ftp_cls = FTP + self.ftp = ftp_cls(timeout=self.timeout, encoding=self.encoding) + self.ftp.connect(self.host, self.port) + self.ftp.login(*self.cred) + + @classmethod + def _strip_protocol(cls, path): + return "/" + infer_storage_options(path)["path"].lstrip("/").rstrip("/") + + @staticmethod + def _get_kwargs_from_urls(urlpath): + out = infer_storage_options(urlpath) + out.pop("path", None) + out.pop("protocol", None) + return out + + def ls(self, path, detail=True, **kwargs): + path = self._strip_protocol(path) + out = [] + if path not in self.dircache: + try: + try: + out = [ + (fn, details) + for (fn, details) in self.ftp.mlsd(path) + if fn not in [".", ".."] + and details["type"] not in ["pdir", "cdir"] + ] + except error_perm: + out = _mlsd2(self.ftp, path) # Not platform independent + for fn, details in out: + details["name"] = "/".join( + ["" if path == "/" else path, fn.lstrip("/")] + ) + if details["type"] == "file": + details["size"] = int(details["size"]) + else: + details["size"] = 0 + if details["type"] == "dir": + details["type"] = "directory" + self.dircache[path] = out + except Error: + try: + info = self.info(path) + if info["type"] == "file": + out = [(path, info)] + except (Error, IndexError) as exc: + raise FileNotFoundError(path) from exc + files = self.dircache.get(path, out) + if not detail: + return sorted([fn for fn, details in files]) + return [details for fn, details in files] + + def info(self, path, **kwargs): + # implement with direct method + path = self._strip_protocol(path) + if path == "/": + # special case, since this dir has no real entry + return {"name": "/", "size": 0, "type": "directory"} + files = self.ls(self._parent(path).lstrip("/"), True) + try: + out = next(f for f in files if f["name"] == path) + except StopIteration as exc: + raise FileNotFoundError(path) from exc + return out + + def get_file(self, rpath, lpath, **kwargs): + if self.isdir(rpath): + if not os.path.exists(lpath): + os.mkdir(lpath) + return + if isfilelike(lpath): + outfile = lpath + else: + outfile = open(lpath, "wb") + + def cb(x): + outfile.write(x) + + self.ftp.retrbinary( + f"RETR {rpath}", + blocksize=self.blocksize, + callback=cb, + ) + if not isfilelike(lpath): + outfile.close()
+ def cat_file(self, path, start=None, end=None, **kwargs): + if end is not None: + return super().cat_file(path, start, end, **kwargs) + out = [] + + def cb(x): + out.append(x) + + try: + self.ftp.retrbinary( + f"RETR {path}", + blocksize=self.blocksize, + rest=start, + callback=cb, + ) + except (Error, error_perm) as orig_exc: + raise FileNotFoundError(path) from orig_exc + return b"".join(out) + + def _open( + self, + path, + mode="rb", + block_size=None, + cache_options=None, + autocommit=True, + **kwargs, + ): + path = self._strip_protocol(path) + block_size = block_size or self.blocksize + return FTPFile( + self, + path, + mode=mode, + block_size=block_size, + tempdir=self.tempdir, + autocommit=autocommit, + cache_options=cache_options, + ) + + def _rm(self, path): + path = self._strip_protocol(path) + self.ftp.delete(path) + self.invalidate_cache(self._parent(path)) + + def rm(self, path, recursive=False, maxdepth=None): + paths = self.expand_path(path, recursive=recursive, maxdepth=maxdepth) + for p in reversed(paths): + if self.isfile(p): + self.rm_file(p) + else: + self.rmdir(p) + + def mkdir(self, path: str, create_parents: bool = True, **kwargs: Any) -> None: + path = self._strip_protocol(path) + parent = self._parent(path) + if parent != self.root_marker and not self.exists(parent) and create_parents: + self.mkdir(parent, create_parents=create_parents) + + self.ftp.mkd(path) + self.invalidate_cache(self._parent(path)) + + def makedirs(self, path: str, exist_ok: bool = False) -> None: + path = self._strip_protocol(path) + if self.exists(path): + # NB: "/" does not "exist" as it has no directory entry + if not exist_ok: + raise FileExistsError(f"{path} exists without `exist_ok`") + # exist_ok=True -> no-op + else: + self.mkdir(path, create_parents=True) + + def rmdir(self, path): + path = self._strip_protocol(path) + self.ftp.rmd(path) + self.invalidate_cache(self._parent(path)) + + def mv(self, path1, path2, **kwargs): + path1 = self._strip_protocol(path1) + path2 = self._strip_protocol(path2) + self.ftp.rename(path1, path2) + self.invalidate_cache(self._parent(path1)) + self.invalidate_cache(self._parent(path2)) + + def __del__(self): + self.ftp.close() + + def invalidate_cache(self, path=None): + if path is None: + self.dircache.clear() + else: + self.dircache.pop(path, None) + super().invalidate_cache(path) + + +class TransferDone(Exception): + """Internal exception to break out of transfer""" + + pass + + +class FTPFile(AbstractBufferedFile): + """Interact with a remote FTP file with read/write buffering""" + + def __init__( + self, + fs, + path, + mode="rb", + block_size="default", + autocommit=True, + cache_type="readahead", + cache_options=None, + **kwargs, + ): + super().__init__( + fs, + path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_type=cache_type, + cache_options=cache_options, + **kwargs, + ) + if not autocommit: + self.target = self.path + self.path = "/".join([kwargs["tempdir"], str(uuid.uuid4())]) + + def commit(self): + self.fs.mv(self.path, self.target) + + def discard(self): + self.fs.rm(self.path) + + def _fetch_range(self, start, end): + """Get bytes between given byte limits + + Implemented by raising an exception in the fetch callback when the + number of bytes received reaches the requested amount. + + Will fail if the server does not respect the REST command on + retrieve requests.
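+
+        Roughly, the exchange with the server looks like this (sketch only,
+        details vary by server):
+
+            REST <start>   -> seek to byte offset ``start``
+            RETR <path>    -> stream file data from there
+            (callback raises TransferDone once ``end - start`` bytes arrived)
+            ABOR           -> abort the transfer early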
+ """ + out = [] + total = [0] + + def callback(x): + total[0] += len(x) + if total[0] > end - start: + out.append(x[: (end - start) - total[0]]) + if end < self.size: + raise TransferDone + else: + out.append(x) + + if total[0] == end - start and end < self.size: + raise TransferDone + + try: + self.fs.ftp.retrbinary( + f"RETR {self.path}", + blocksize=self.blocksize, + rest=start, + callback=callback, + ) + except TransferDone: + try: + # stop transfer, we got enough bytes for this block + self.fs.ftp.abort() + self.fs.ftp.getmultiline() + except Error: + self.fs._connect() + + return b"".join(out) + + def _upload_chunk(self, final=False): + self.buffer.seek(0) + self.fs.ftp.storbinary( + f"STOR {self.path}", self.buffer, blocksize=self.blocksize, rest=self.offset + ) + return True + + +def _mlsd2(ftp, path="."): + """ + Fall back to using `dir` instead of `mlsd` if not supported. + + This parses a Linux style `ls -l` response to `dir`, but the response may + be platform dependent. + + Parameters + ---------- + ftp: ftplib.FTP + path: str + Expects to be given path, but defaults to ".". + """ + lines = [] + minfo = [] + ftp.dir(path, lines.append) + for line in lines: + split_line = line.split() + if len(split_line) < 9: + continue + this = ( + split_line[-1], + { + "modify": " ".join(split_line[5:8]), + "unix.owner": split_line[2], + "unix.group": split_line[3], + "unix.mode": split_line[0], + "size": split_line[4], + }, + ) + if this[1]["unix.mode"][0] == "d": + this[1]["type"] = "dir" + else: + this[1]["type"] = "file" + minfo.append(this) + return minfo diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/gist.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/gist.py new file mode 100644 index 0000000000000000000000000000000000000000..74117b544b02108fd2a7be06c716df477481ed47 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/gist.py @@ -0,0 +1,232 @@ +import requests + +from ..spec import AbstractFileSystem +from ..utils import infer_storage_options +from .memory import MemoryFile + + +class GistFileSystem(AbstractFileSystem): + """ + Interface to files in a single GitHub Gist. + + Provides read-only access to a gist's files. Gists do not contain + subdirectories, so file listing is straightforward. + + Parameters + ---------- + gist_id : str + The ID of the gist you want to access (the long hex value from the URL). + filenames : list[str] (optional) + If provided, only make a file system representing these files, and do not fetch + the list of all files for this gist. + sha : str (optional) + If provided, fetch a particular revision of the gist. If omitted, + the latest revision is used. + username : str (optional) + GitHub username for authentication (required if token is given). + token : str (optional) + GitHub personal access token (required if username is given). + timeout : (float, float) or float, optional + Connect and read timeouts for requests (default 60s each). + kwargs : dict + Stored on `self.request_kw` and passed to `requests.get` when fetching Gist + metadata or reading ("opening") a file. 
+ """ + + protocol = "gist" + gist_url = "https://api.github.com/gists/{gist_id}" + gist_rev_url = "https://api.github.com/gists/{gist_id}/{sha}" + + def __init__( + self, + gist_id, + filenames=None, + sha=None, + username=None, + token=None, + timeout=None, + **kwargs, + ): + super().__init__() + self.gist_id = gist_id + self.filenames = filenames + self.sha = sha # revision of the gist (optional) + if (username is None) ^ (token is None): + # Both or neither must be set + if username or token: + raise ValueError("Auth requires both username and token, or neither.") + self.username = username + self.token = token + self.request_kw = kwargs + # Default timeouts to 60s connect/read if none provided + self.timeout = timeout if timeout is not None else (60, 60) + + # We use a single-level "directory" cache, because a gist is essentially flat + self.dircache[""] = self._fetch_file_list() + + @property + def kw(self): + """Auth parameters passed to 'requests' if we have username/token.""" + if self.username is not None and self.token is not None: + return {"auth": (self.username, self.token), **self.request_kw} + return self.request_kw + + def _fetch_gist_metadata(self): + """ + Fetch the JSON metadata for this gist (possibly for a specific revision). + """ + if self.sha: + url = self.gist_rev_url.format(gist_id=self.gist_id, sha=self.sha) + else: + url = self.gist_url.format(gist_id=self.gist_id) + + r = requests.get(url, timeout=self.timeout, **self.kw) + if r.status_code == 404: + raise FileNotFoundError( + f"Gist not found: {self.gist_id}@{self.sha or 'latest'}" + ) + r.raise_for_status() + return r.json() + + def _fetch_file_list(self): + """ + Returns a list of dicts describing each file in the gist. These get stored + in self.dircache[""]. + """ + meta = self._fetch_gist_metadata() + if self.filenames: + available_files = meta.get("files", {}) + files = {} + for fn in self.filenames: + if fn not in available_files: + raise FileNotFoundError(fn) + files[fn] = available_files[fn] + else: + files = meta.get("files", {}) + + out = [] + for fname, finfo in files.items(): + if finfo is None: + # Occasionally GitHub returns a file entry with null if it was deleted + continue + # Build a directory entry + out.append( + { + "name": fname, # file's name + "type": "file", # gists have no subdirectories + "size": finfo.get("size", 0), # file size in bytes + "raw_url": finfo.get("raw_url"), + } + ) + return out + + @classmethod + def _strip_protocol(cls, path): + """ + Remove 'gist://' from the path, if present. + """ + # The default infer_storage_options can handle gist://username:token@id/file + # or gist://id/file, but let's ensure we handle a normal usage too. + # We'll just strip the protocol prefix if it exists. + path = infer_storage_options(path).get("path", path) + return path.lstrip("/") + + @staticmethod + def _get_kwargs_from_urls(path): + """ + Parse 'gist://' style URLs into GistFileSystem constructor kwargs. 
+ For example: + gist://:TOKEN@/file.txt + gist://username:TOKEN@/file.txt + """ + so = infer_storage_options(path) + out = {} + if "username" in so and so["username"]: + out["username"] = so["username"] + if "password" in so and so["password"]: + out["token"] = so["password"] + if "host" in so and so["host"]: + # We interpret 'host' as the gist ID + out["gist_id"] = so["host"] + + # Extract SHA and filename from path + if "path" in so and so["path"]: + path_parts = so["path"].rsplit("/", 2)[-2:] + if len(path_parts) == 2: + if path_parts[0]: # SHA present + out["sha"] = path_parts[0] + if path_parts[1]: # filename also present + out["filenames"] = [path_parts[1]] + + return out + + def ls(self, path="", detail=False, **kwargs): + """ + List files in the gist. Gists are single-level, so any 'path' is basically + the filename, or empty for all files. + + Parameters + ---------- + path : str, optional + The filename to list. If empty, returns all files in the gist. + detail : bool, default False + If True, return a list of dicts; if False, return a list of filenames. + """ + path = self._strip_protocol(path or "") + # If path is empty, return all + if path == "": + results = self.dircache[""] + else: + # We want just the single file with this name + all_files = self.dircache[""] + results = [f for f in all_files if f["name"] == path] + if not results: + raise FileNotFoundError(path) + if detail: + return results + else: + return sorted(f["name"] for f in results) + + def _open(self, path, mode="rb", block_size=None, **kwargs): + """ + Read a single file from the gist. + """ + if mode != "rb": + raise NotImplementedError("GitHub Gist FS is read-only (no write).") + + path = self._strip_protocol(path) + # Find the file entry in our dircache + matches = [f for f in self.dircache[""] if f["name"] == path] + if not matches: + raise FileNotFoundError(path) + finfo = matches[0] + + raw_url = finfo.get("raw_url") + if not raw_url: + raise FileNotFoundError(f"No raw_url for file: {path}") + + r = requests.get(raw_url, timeout=self.timeout, **self.kw) + if r.status_code == 404: + raise FileNotFoundError(path) + r.raise_for_status() + return MemoryFile(path, None, r.content) + + def cat(self, path, recursive=False, on_error="raise", **kwargs): + """ + Return {path: contents} for the given file or files. If 'recursive' is True, + and path is empty, returns all files in the gist. 
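+
+        For example (file name and contents illustrative):
+
+        >>> fs.cat("notes.txt")  # doctest: +SKIP
+        {'notes.txt': b'hello'}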
+ """ + paths = self.expand_path(path, recursive=recursive) + out = {} + for p in paths: + try: + with self.open(p, "rb") as f: + out[p] = f.read() + except FileNotFoundError as e: + if on_error == "raise": + raise e + elif on_error == "omit": + pass # skip + else: + out[p] = e + return out diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/git.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/git.py new file mode 100644 index 0000000000000000000000000000000000000000..808d293a1c991ea87d19a2129f3e56d9b813daaa --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/git.py @@ -0,0 +1,114 @@ +import os + +import pygit2 + +from fsspec.spec import AbstractFileSystem + +from .memory import MemoryFile + + +class GitFileSystem(AbstractFileSystem): + """Browse the files of a local git repo at any hash/tag/branch + + (experimental backend) + """ + + root_marker = "" + cachable = True + + def __init__(self, path=None, fo=None, ref=None, **kwargs): + """ + + Parameters + ---------- + path: str (optional) + Local location of the repo (uses current directory if not given). + May be deprecated in favour of ``fo``. When used with a higher + level function such as fsspec.open(), may be of the form + "git://[path-to-repo[:]][ref@]path/to/file" (but the actual + file path should not contain "@" or ":"). + fo: str (optional) + Same as ``path``, but passed as part of a chained URL. This one + takes precedence if both are given. + ref: str (optional) + Reference to work with, could be a hash, tag or branch name. Defaults + to current working tree. Note that ``ls`` and ``open`` also take hash, + so this becomes the default for those operations + kwargs + """ + super().__init__(**kwargs) + self.repo = pygit2.Repository(fo or path or os.getcwd()) + self.ref = ref or "master" + + @classmethod + def _strip_protocol(cls, path): + path = super()._strip_protocol(path).lstrip("/") + if ":" in path: + path = path.split(":", 1)[1] + if "@" in path: + path = path.split("@", 1)[1] + return path.lstrip("/") + + def _path_to_object(self, path, ref): + comm, ref = self.repo.resolve_refish(ref or self.ref) + parts = path.split("/") + tree = comm.tree + for part in parts: + if part and isinstance(tree, pygit2.Tree): + if part not in tree: + raise FileNotFoundError(path) + tree = tree[part] + return tree + + @staticmethod + def _get_kwargs_from_urls(path): + path = path.removeprefix("git://") + out = {} + if ":" in path: + out["path"], path = path.split(":", 1) + if "@" in path: + out["ref"], path = path.split("@", 1) + return out + + @staticmethod + def _object_to_info(obj, path=None): + # obj.name and obj.filemode are None for the root tree! 
+ is_dir = isinstance(obj, pygit2.Tree) + return { + "type": "directory" if is_dir else "file", + "name": ( + "/".join([path, obj.name or ""]).lstrip("/") if path else obj.name + ), + "hex": str(obj.id), + "mode": "100644" if obj.filemode is None else f"{obj.filemode:o}", + "size": 0 if is_dir else obj.size, + } + + def ls(self, path, detail=True, ref=None, **kwargs): + tree = self._path_to_object(self._strip_protocol(path), ref) + return [ + GitFileSystem._object_to_info(obj, path) + if detail + else GitFileSystem._object_to_info(obj, path)["name"] + for obj in (tree if isinstance(tree, pygit2.Tree) else [tree]) + ] + + def info(self, path, ref=None, **kwargs): + tree = self._path_to_object(self._strip_protocol(path), ref) + return GitFileSystem._object_to_info(tree, path) + + def ukey(self, path, ref=None): + return self.info(path, ref=ref)["hex"] + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + ref=None, + **kwargs, + ): + obj = self._path_to_object(path, ref or self.ref) + return MemoryFile(data=obj.data) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/github.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/github.py new file mode 100644 index 0000000000000000000000000000000000000000..3630f6db54413e2c396f6cc1b6b10cd379200043 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/github.py @@ -0,0 +1,333 @@ +import base64 +import re + +import requests + +from ..spec import AbstractFileSystem +from ..utils import infer_storage_options +from .memory import MemoryFile + + +class GithubFileSystem(AbstractFileSystem): + """Interface to files in github + + An instance of this class provides the files residing within a remote github + repository. You may specify a point in the repos history, by SHA, branch + or tag (default is current master). + + For files less than 1 MB in size, file content is returned directly in a + MemoryFile. For larger files, or for files tracked by git-lfs, file content + is returned as an HTTPFile wrapping the ``download_url`` provided by the + GitHub API. + + When using fsspec.open, allows URIs of the form: + + - "github://path/file", in which case you must specify org, repo and + may specify sha in the extra args + - 'github://org:repo@/precip/catalog.yml', where the org and repo are + part of the URI + - 'github://org:repo@sha/precip/catalog.yml', where the sha is also included + + ``sha`` can be the full or abbreviated hex of the commit you want to fetch + from, or a branch or tag name (so long as it doesn't contain special characters + like "/", "?", which would have to be HTTP-encoded). 
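+
+    For example (a public repo, used purely for illustration; output
+    abbreviated):
+
+    >>> fs = GithubFileSystem("fsspec", "filesystem_spec")  # doctest: +SKIP
+    >>> fs.ls("")  # doctest: +SKIP
+    ['.github', 'docs', 'fsspec', ...]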
+ + For authorised access, you must provide username and token, which can be made + at https://github.com/settings/tokens + """ + + url = "https://api.github.com/repos/{org}/{repo}/git/trees/{sha}" + content_url = "https://api.github.com/repos/{org}/{repo}/contents/{path}?ref={sha}" + protocol = "github" + timeout = (60, 60) # connect, read timeouts + + def __init__( + self, org, repo, sha=None, username=None, token=None, timeout=None, **kwargs + ): + super().__init__(**kwargs) + self.org = org + self.repo = repo + if (username is None) ^ (token is None): + raise ValueError("Auth required both username and token") + self.username = username + self.token = token + if timeout is not None: + self.timeout = timeout + if sha is None: + # look up default branch (not necessarily "master") + u = "https://api.github.com/repos/{org}/{repo}" + r = requests.get( + u.format(org=org, repo=repo), timeout=self.timeout, **self.kw + ) + r.raise_for_status() + sha = r.json()["default_branch"] + + self.root = sha + self.ls("") + try: + from .http import HTTPFileSystem + + self.http_fs = HTTPFileSystem(**kwargs) + except ImportError: + self.http_fs = None + + @property + def kw(self): + if self.username: + return {"auth": (self.username, self.token)} + return {} + + @classmethod + def repos(cls, org_or_user, is_org=True): + """List repo names for given org or user + + This may become the top level of the FS + + Parameters + ---------- + org_or_user: str + Name of the github org or user to query + is_org: bool (default True) + Whether the name is an organisation (True) or user (False) + + Returns + ------- + List of string + """ + r = requests.get( + f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos", + timeout=cls.timeout, + ) + r.raise_for_status() + return [repo["name"] for repo in r.json()] + + @property + def tags(self): + """Names of tags in the repo""" + r = requests.get( + f"https://api.github.com/repos/{self.org}/{self.repo}/tags", + timeout=self.timeout, + **self.kw, + ) + r.raise_for_status() + return [t["name"] for t in r.json()] + + @property + def branches(self): + """Names of branches in the repo""" + r = requests.get( + f"https://api.github.com/repos/{self.org}/{self.repo}/branches", + timeout=self.timeout, + **self.kw, + ) + r.raise_for_status() + return [t["name"] for t in r.json()] + + @property + def refs(self): + """Named references, tags and branches""" + return {"tags": self.tags, "branches": self.branches} + + def ls(self, path, detail=False, sha=None, _sha=None, **kwargs): + """List files at given path + + Parameters + ---------- + path: str + Location to list, relative to repo root + detail: bool + If True, returns list of dicts, one per file; if False, returns + list of full filenames only + sha: str (optional) + List at the given point in the repo history, branch or tag name or commit + SHA + _sha: str (optional) + List this specific tree object (used internally to descend into trees) + """ + path = self._strip_protocol(path) + if path == "": + _sha = sha or self.root + if _sha is None: + parts = path.rstrip("/").split("/") + so_far = "" + _sha = sha or self.root + for part in parts: + out = self.ls(so_far, True, sha=sha, _sha=_sha) + so_far += "/" + part if so_far else part + out = [o for o in out if o["name"] == so_far] + if not out: + raise FileNotFoundError(path) + out = out[0] + if out["type"] == "file": + if detail: + return [out] + else: + return path + _sha = out["sha"] + if path not in self.dircache or sha not in [self.root, None]: + r = requests.get( 
+ self.url.format(org=self.org, repo=self.repo, sha=_sha), + timeout=self.timeout, + **self.kw, + ) + if r.status_code == 404: + raise FileNotFoundError(path) + r.raise_for_status() + types = {"blob": "file", "tree": "directory"} + out = [ + { + "name": path + "/" + f["path"] if path else f["path"], + "mode": f["mode"], + "type": types[f["type"]], + "size": f.get("size", 0), + "sha": f["sha"], + } + for f in r.json()["tree"] + if f["type"] in types + ] + if sha in [self.root, None]: + self.dircache[path] = out + else: + out = self.dircache[path] + if detail: + return out + else: + return sorted([f["name"] for f in out]) + + def invalidate_cache(self, path=None): + self.dircache.clear() + + @classmethod + def _strip_protocol(cls, path): + opts = infer_storage_options(path) + if "username" not in opts: + return super()._strip_protocol(path) + return opts["path"].lstrip("/") + + @staticmethod + def _get_kwargs_from_urls(path): + opts = infer_storage_options(path) + if "username" not in opts: + return {} + out = {"org": opts["username"], "repo": opts["password"]} + if opts["host"]: + out["sha"] = opts["host"] + return out + + def _open( + self, + path, + mode="rb", + block_size=None, + cache_options=None, + sha=None, + **kwargs, + ): + if mode != "rb": + raise NotImplementedError + + # construct a url to hit the GitHub API's repo contents API + url = self.content_url.format( + org=self.org, repo=self.repo, path=path, sha=sha or self.root + ) + + # make a request to this API, and parse the response as JSON + r = requests.get(url, timeout=self.timeout, **self.kw) + if r.status_code == 404: + raise FileNotFoundError(path) + r.raise_for_status() + content_json = r.json() + + # if the response's content key is not empty, try to parse it as base64 + if content_json["content"]: + content = base64.b64decode(content_json["content"]) + + # as long as the content does not start with the string + # "version https://git-lfs.github.com/" + # then it is probably not a git-lfs pointer and we can just return + # the content directly + if not content.startswith(b"version https://git-lfs.github.com/"): + return MemoryFile(None, None, content) + + # we land here if the content was not present in the first response + # (regular file over 1MB or git-lfs tracked file) + # in this case, we get let the HTTPFileSystem handle the download + if self.http_fs is None: + raise ImportError( + "Please install fsspec[http] to access github files >1 MB " + "or git-lfs tracked files." + ) + return self.http_fs.open( + content_json["download_url"], + mode=mode, + block_size=block_size, + cache_options=cache_options, + **kwargs, + ) + + def rm(self, path, recursive=False, maxdepth=None, message=None): + path = self.expand_path(path, recursive=recursive, maxdepth=maxdepth) + for p in reversed(path): + self.rm_file(p, message=message) + + def rm_file(self, path, message=None, **kwargs): + """ + Remove a file from a specified branch using a given commit message. + + Since Github DELETE operation requires a branch name, and we can't reliably + determine whether the provided SHA refers to a branch, tag, or commit, we + assume it's a branch. If it's not, the user will encounter an error when + attempting to retrieve the file SHA or delete the file. + + Parameters + ---------- + path: str + The file's location relative to the repository root. + message: str, optional + The commit message for the deletion. 
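+
+        For example (path and message are placeholders; requires
+        username/token):
+
+        >>> fs.rm_file("data/old.csv", message="remove stale file")  # doctest: +SKIP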
+ """ + + if not self.username: + raise ValueError("Authentication required") + + path = self._strip_protocol(path) + + # Attempt to get SHA from cache or Github API + sha = self._get_sha_from_cache(path) + if not sha: + url = self.content_url.format( + org=self.org, repo=self.repo, path=path.lstrip("/"), sha=self.root + ) + r = requests.get(url, timeout=self.timeout, **self.kw) + if r.status_code == 404: + raise FileNotFoundError(path) + r.raise_for_status() + sha = r.json()["sha"] + + # Delete the file + delete_url = self.content_url.format( + org=self.org, repo=self.repo, path=path, sha=self.root + ) + branch = self.root + data = { + "message": message or f"Delete {path}", + "sha": sha, + **({"branch": branch} if branch else {}), + } + + r = requests.delete(delete_url, json=data, timeout=self.timeout, **self.kw) + error_message = r.json().get("message", "") + if re.search(r"Branch .+ not found", error_message): + error = "Remove only works when the filesystem is initialised from a branch or default (None)" + raise ValueError(error) + r.raise_for_status() + + self.invalidate_cache(path) + + def _get_sha_from_cache(self, path): + for entries in self.dircache.values(): + for entry in entries: + entry_path = entry.get("name") + if entry_path and entry_path == path and "sha" in entry: + return entry["sha"] + return None diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/http.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/http.py new file mode 100644 index 0000000000000000000000000000000000000000..ea8d79d461ab0aca6c6d2aab2cc7c2821773bc65 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/http.py @@ -0,0 +1,890 @@ +import asyncio +import io +import logging +import re +import weakref +from copy import copy +from urllib.parse import urlparse + +import aiohttp +import yarl + +from fsspec.asyn import AbstractAsyncStreamedFile, AsyncFileSystem, sync, sync_wrapper +from fsspec.callbacks import DEFAULT_CALLBACK +from fsspec.exceptions import FSTimeoutError +from fsspec.spec import AbstractBufferedFile +from fsspec.utils import ( + DEFAULT_BLOCK_SIZE, + glob_translate, + isfilelike, + nullcontext, + tokenize, +) + +from ..caching import AllBytes + +# https://stackoverflow.com/a/15926317/3821154 +ex = re.compile(r"""<(a|A)\s+(?:[^>]*?\s+)?(href|HREF)=["'](?P[^"']+)""") +ex2 = re.compile(r"""(?Phttp[s]?://[-a-zA-Z0-9@:%_+.~#?&/=]+)""") +logger = logging.getLogger("fsspec.http") + + +async def get_client(**kwargs): + return aiohttp.ClientSession(**kwargs) + + +class HTTPFileSystem(AsyncFileSystem): + """ + Simple File-System for fetching data via HTTP(S) + + ``ls()`` is implemented by loading the parent page and doing a regex + match on the result. If simple_link=True, anything of the form + "http(s)://server.com/stuff?thing=other"; otherwise only links within + HTML href tags will be used. 
+ """ + + sep = "/" + + def __init__( + self, + simple_links=True, + block_size=None, + same_scheme=True, + size_policy=None, + cache_type="bytes", + cache_options=None, + asynchronous=False, + loop=None, + client_kwargs=None, + get_client=get_client, + encoded=False, + **storage_options, + ): + """ + NB: if this is called async, you must await set_client + + Parameters + ---------- + block_size: int + Blocks to read bytes; if 0, will default to raw requests file-like + objects instead of HTTPFile instances + simple_links: bool + If True, will consider both HTML tags and anything that looks + like a URL; if False, will consider only the former. + same_scheme: True + When doing ls/glob, if this is True, only consider paths that have + http/https matching the input URLs. + size_policy: this argument is deprecated + client_kwargs: dict + Passed to aiohttp.ClientSession, see + https://docs.aiohttp.org/en/stable/client_reference.html + For example, ``{'auth': aiohttp.BasicAuth('user', 'pass')}`` + get_client: Callable[..., aiohttp.ClientSession] + A callable, which takes keyword arguments and constructs + an aiohttp.ClientSession. Its state will be managed by + the HTTPFileSystem class. + storage_options: key-value + Any other parameters passed on to requests + cache_type, cache_options: defaults used in open() + """ + super().__init__(self, asynchronous=asynchronous, loop=loop, **storage_options) + self.block_size = block_size if block_size is not None else DEFAULT_BLOCK_SIZE + self.simple_links = simple_links + self.same_schema = same_scheme + self.cache_type = cache_type + self.cache_options = cache_options + self.client_kwargs = client_kwargs or {} + self.get_client = get_client + self.encoded = encoded + self.kwargs = storage_options + self._session = None + + # Clean caching-related parameters from `storage_options` + # before propagating them as `request_options` through `self.kwargs`. + # TODO: Maybe rename `self.kwargs` to `self.request_options` to make + # it clearer. + request_options = copy(storage_options) + self.use_listings_cache = request_options.pop("use_listings_cache", False) + request_options.pop("listings_expiry_time", None) + request_options.pop("max_paths", None) + request_options.pop("skip_instance_cache", None) + self.kwargs = request_options + + @property + def fsid(self): + return "http" + + def encode_url(self, url): + return yarl.URL(url, encoded=self.encoded) + + @staticmethod + def close_session(loop, session): + if loop is not None and loop.is_running(): + try: + sync(loop, session.close, timeout=0.1) + return + except (TimeoutError, FSTimeoutError, NotImplementedError): + pass + connector = getattr(session, "_connector", None) + if connector is not None: + # close after loop is dead + connector._close() + + async def set_session(self): + if self._session is None: + self._session = await self.get_client(loop=self.loop, **self.client_kwargs) + if not self.asynchronous: + weakref.finalize(self, self.close_session, self.loop, self._session) + return self._session + + @classmethod + def _strip_protocol(cls, path): + """For HTTP, we always want to keep the full URL""" + return path + + @classmethod + def _parent(cls, path): + # override, since _strip_protocol is different for URLs + par = super()._parent(path) + if len(par) > 7: # "http://..." 
+ return par + return "" + + async def _ls_real(self, url, detail=True, **kwargs): + # ignoring URL-encoded arguments + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(url) + session = await self.set_session() + async with session.get(self.encode_url(url), **self.kwargs) as r: + self._raise_not_found_for_status(r, url) + + if "Content-Type" in r.headers: + mimetype = r.headers["Content-Type"].partition(";")[0] + else: + mimetype = None + + if mimetype in ("text/html", None): + try: + text = await r.text(errors="ignore") + if self.simple_links: + links = ex2.findall(text) + [u[2] for u in ex.findall(text)] + else: + links = [u[2] for u in ex.findall(text)] + except UnicodeDecodeError: + links = [] # binary, not HTML + else: + links = [] + + out = set() + parts = urlparse(url) + for l in links: + if isinstance(l, tuple): + l = l[1] + if l.startswith("/") and len(l) > 1: + # absolute URL on this server + l = f"{parts.scheme}://{parts.netloc}{l}" + if l.startswith("http"): + if self.same_schema and l.startswith(url.rstrip("/") + "/"): + out.add(l) + elif l.replace("https", "http").startswith( + url.replace("https", "http").rstrip("/") + "/" + ): + # allowed to cross http <-> https + out.add(l) + else: + if l not in ["..", "../"]: + # Ignore FTP-like "parent" + out.add("/".join([url.rstrip("/"), l.lstrip("/")])) + if not out and url.endswith("/"): + out = await self._ls_real(url.rstrip("/"), detail=False) + if detail: + return [ + { + "name": u, + "size": None, + "type": "directory" if u.endswith("/") else "file", + } + for u in out + ] + else: + return sorted(out) + + async def _ls(self, url, detail=True, **kwargs): + if self.use_listings_cache and url in self.dircache: + out = self.dircache[url] + else: + out = await self._ls_real(url, detail=detail, **kwargs) + self.dircache[url] = out + return out + + ls = sync_wrapper(_ls) + + def _raise_not_found_for_status(self, response, url): + """ + Raises FileNotFoundError for 404s, otherwise uses raise_for_status. 
+ """ + if response.status == 404: + raise FileNotFoundError(url) + response.raise_for_status() + + async def _cat_file(self, url, start=None, end=None, **kwargs): + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(url) + + if start is not None or end is not None: + if start == end: + return b"" + headers = kw.pop("headers", {}).copy() + + headers["Range"] = await self._process_limits(url, start, end) + kw["headers"] = headers + session = await self.set_session() + async with session.get(self.encode_url(url), **kw) as r: + out = await r.read() + self._raise_not_found_for_status(r, url) + return out + + async def _get_file( + self, rpath, lpath, chunk_size=5 * 2**20, callback=DEFAULT_CALLBACK, **kwargs + ): + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(rpath) + session = await self.set_session() + async with session.get(self.encode_url(rpath), **kw) as r: + try: + size = int(r.headers["content-length"]) + except (ValueError, KeyError): + size = None + + callback.set_size(size) + self._raise_not_found_for_status(r, rpath) + if isfilelike(lpath): + outfile = lpath + else: + outfile = open(lpath, "wb") # noqa: ASYNC230 + + try: + chunk = True + while chunk: + chunk = await r.content.read(chunk_size) + outfile.write(chunk) + callback.relative_update(len(chunk)) + finally: + if not isfilelike(lpath): + outfile.close() + + async def _put_file( + self, + lpath, + rpath, + chunk_size=5 * 2**20, + callback=DEFAULT_CALLBACK, + method="post", + mode="overwrite", + **kwargs, + ): + if mode != "overwrite": + raise NotImplementedError("Exclusive write") + + async def gen_chunks(): + # Support passing arbitrary file-like objects + # and use them instead of streams. + if isinstance(lpath, io.IOBase): + context = nullcontext(lpath) + use_seek = False # might not support seeking + else: + context = open(lpath, "rb") # noqa: ASYNC230 + use_seek = True + + with context as f: + if use_seek: + callback.set_size(f.seek(0, 2)) + f.seek(0) + else: + callback.set_size(getattr(f, "size", None)) + + chunk = f.read(chunk_size) + while chunk: + yield chunk + callback.relative_update(len(chunk)) + chunk = f.read(chunk_size) + + kw = self.kwargs.copy() + kw.update(kwargs) + session = await self.set_session() + + method = method.lower() + if method not in ("post", "put"): + raise ValueError( + f"method has to be either 'post' or 'put', not: {method!r}" + ) + + meth = getattr(session, method) + async with meth(self.encode_url(rpath), data=gen_chunks(), **kw) as resp: + self._raise_not_found_for_status(resp, rpath) + + async def _exists(self, path, **kwargs): + kw = self.kwargs.copy() + kw.update(kwargs) + try: + logger.debug(path) + session = await self.set_session() + r = await session.get(self.encode_url(path), **kw) + async with r: + return r.status < 400 + except aiohttp.ClientError: + return False + + async def _isfile(self, path, **kwargs): + return await self._exists(path, **kwargs) + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=None, # XXX: This differs from the base class. + cache_type=None, + cache_options=None, + size=None, + **kwargs, + ): + """Make a file-like object + + Parameters + ---------- + path: str + Full URL with protocol + mode: string + must be "rb" + block_size: int or None + Bytes to download in one request; use instance value if None. If + zero, will return a streaming Requests file-like instance. 
+        kwargs: key-value
+            Any other parameters, passed to requests calls
+        """
+        if mode != "rb":
+            raise NotImplementedError
+        block_size = block_size if block_size is not None else self.block_size
+        kw = self.kwargs.copy()
+        kw["asynchronous"] = self.asynchronous
+        kw.update(kwargs)
+        info = {}
+        # info.update() returns None, so when size is not given this falls
+        # through to a fresh info["size"] lookup
+        size = size or info.update(self.info(path, **kwargs)) or info["size"]
+        session = sync(self.loop, self.set_session)
+        if block_size and size and info.get("partial", True):
+            return HTTPFile(
+                self,
+                path,
+                session=session,
+                block_size=block_size,
+                mode=mode,
+                size=size,
+                cache_type=cache_type or self.cache_type,
+                cache_options=cache_options or self.cache_options,
+                loop=self.loop,
+                **kw,
+            )
+        else:
+            return HTTPStreamFile(
+                self,
+                path,
+                mode=mode,
+                loop=self.loop,
+                session=session,
+                **kw,
+            )
+
+    async def open_async(self, path, mode="rb", size=None, **kwargs):
+        session = await self.set_session()
+        if size is None:
+            try:
+                size = (await self._info(path, **kwargs))["size"]
+            except FileNotFoundError:
+                pass
+        return AsyncStreamFile(
+            self,
+            path,
+            loop=self.loop,
+            session=session,
+            size=size,
+            **kwargs,
+        )
+
+    def ukey(self, url):
+        """Unique identifier; assume HTTP files are static, unchanging"""
+        return tokenize(url, self.kwargs, self.protocol)
+
+    async def _info(self, url, **kwargs):
+        """Get info of URL
+
+        Tries to access location via HEAD, and then GET methods, but does
+        not fetch the data.
+
+        It is possible that the server does not supply any size information, in
+        which case size will be given as None (and certain operations on the
+        corresponding file will not work).
+        """
+        info = {}
+        session = await self.set_session()
+
+        for policy in ["head", "get"]:
+            try:
+                info.update(
+                    await _file_info(
+                        self.encode_url(url),
+                        size_policy=policy,
+                        session=session,
+                        **self.kwargs,
+                        **kwargs,
+                    )
+                )
+                if info.get("size") is not None:
+                    break
+            except Exception as exc:
+                if policy == "get":
+                    # If get failed, then raise a FileNotFoundError
+                    raise FileNotFoundError(url) from exc
+                logger.debug("", exc_info=exc)
+
+        return {"name": url, "size": None, **info, "type": "file"}
+
+    async def _glob(self, path, maxdepth=None, **kwargs):
+        """
+        Find files by glob-matching.
+
+        This implementation is identical to the one in AbstractFileSystem,
+        but "?" is not considered as a character for globbing, because it is
+        so common in URLs, often identifying the "query" part.
+ """ + if maxdepth is not None and maxdepth < 1: + raise ValueError("maxdepth must be at least 1") + import re + + ends_with_slash = path.endswith("/") # _strip_protocol strips trailing slash + path = self._strip_protocol(path) + append_slash_to_dirname = ends_with_slash or path.endswith(("/**", "/*")) + idx_star = path.find("*") if path.find("*") >= 0 else len(path) + idx_brace = path.find("[") if path.find("[") >= 0 else len(path) + + min_idx = min(idx_star, idx_brace) + + detail = kwargs.pop("detail", False) + + if not has_magic(path): + if await self._exists(path, **kwargs): + if not detail: + return [path] + else: + return {path: await self._info(path, **kwargs)} + else: + if not detail: + return [] # glob of non-existent returns empty + else: + return {} + elif "/" in path[:min_idx]: + min_idx = path[:min_idx].rindex("/") + root = path[: min_idx + 1] + depth = path[min_idx + 1 :].count("/") + 1 + else: + root = "" + depth = path[min_idx + 1 :].count("/") + 1 + + if "**" in path: + if maxdepth is not None: + idx_double_stars = path.find("**") + depth_double_stars = path[idx_double_stars:].count("/") + 1 + depth = depth - depth_double_stars + maxdepth + else: + depth = None + + allpaths = await self._find( + root, maxdepth=depth, withdirs=True, detail=True, **kwargs + ) + + pattern = glob_translate(path + ("/" if ends_with_slash else "")) + pattern = re.compile(pattern) + + out = { + ( + p.rstrip("/") + if not append_slash_to_dirname + and info["type"] == "directory" + and p.endswith("/") + else p + ): info + for p, info in sorted(allpaths.items()) + if pattern.match(p.rstrip("/")) + } + + if detail: + return out + else: + return list(out) + + async def _isdir(self, path): + # override, since all URLs are (also) files + try: + return bool(await self._ls(path)) + except (FileNotFoundError, ValueError): + return False + + async def _pipe_file(self, path, value, mode="overwrite", **kwargs): + """ + Write bytes to a remote file over HTTP. + + Parameters + ---------- + path : str + Target URL where the data should be written + value : bytes + Data to be written + mode : str + How to write to the file - 'overwrite' or 'append' + **kwargs : dict + Additional parameters to pass to the HTTP request + """ + url = self._strip_protocol(path) + headers = kwargs.pop("headers", {}) + headers["Content-Length"] = str(len(value)) + + session = await self.set_session() + + async with session.put(url, data=value, headers=headers, **kwargs) as r: + r.raise_for_status() + + +class HTTPFile(AbstractBufferedFile): + """ + A file-like object pointing to a remote HTTP(S) resource + + Supports only reading, with read-ahead of a predetermined block-size. + + In the case that the server does not supply the filesize, only reading of + the complete file in one go is supported. + + Parameters + ---------- + url: str + Full URL of the remote resource, including the protocol + session: aiohttp.ClientSession or None + All calls will be made within this session, to avoid restarting + connections where the server allows this + block_size: int or None + The amount of read-ahead to do, in bytes. Default is 5MB, or the value + configured for the FileSystem creating this file + size: None or int + If given, this is the size of the file in bytes, and we don't attempt + to call the server to find the value. + kwargs: all other key-values are passed to requests calls. 
+ """ + + def __init__( + self, + fs, + url, + session=None, + block_size=None, + mode="rb", + cache_type="bytes", + cache_options=None, + size=None, + loop=None, + asynchronous=False, + **kwargs, + ): + if mode != "rb": + raise NotImplementedError("File mode not supported") + self.asynchronous = asynchronous + self.loop = loop + self.url = url + self.session = session + self.details = {"name": url, "size": size, "type": "file"} + super().__init__( + fs=fs, + path=url, + mode=mode, + block_size=block_size, + cache_type=cache_type, + cache_options=cache_options, + **kwargs, + ) + + def read(self, length=-1): + """Read bytes from file + + Parameters + ---------- + length: int + Read up to this many bytes. If negative, read all content to end of + file. If the server has not supplied the filesize, attempting to + read only part of the data will raise a ValueError. + """ + if ( + (length < 0 and self.loc == 0) # explicit read all + # but not when the size is known and fits into a block anyways + and not (self.size is not None and self.size <= self.blocksize) + ): + self._fetch_all() + if self.size is None: + if length < 0: + self._fetch_all() + else: + length = min(self.size - self.loc, length) + return super().read(length) + + async def async_fetch_all(self): + """Read whole file in one shot, without caching + + This is only called when position is still at zero, + and read() is called without a byte-count. + """ + logger.debug(f"Fetch all for {self}") + if not isinstance(self.cache, AllBytes): + r = await self.session.get(self.fs.encode_url(self.url), **self.kwargs) + async with r: + r.raise_for_status() + out = await r.read() + self.cache = AllBytes( + size=len(out), fetcher=None, blocksize=None, data=out + ) + self.size = len(out) + + _fetch_all = sync_wrapper(async_fetch_all) + + def _parse_content_range(self, headers): + """Parse the Content-Range header""" + s = headers.get("Content-Range", "") + m = re.match(r"bytes (\d+-\d+|\*)/(\d+|\*)", s) + if not m: + return None, None, None + + if m[1] == "*": + start = end = None + else: + start, end = [int(x) for x in m[1].split("-")] + total = None if m[2] == "*" else int(m[2]) + return start, end, total + + async def async_fetch_range(self, start, end): + """Download a block of data + + The expectation is that the server returns only the requested bytes, + with HTTP code 206. If this is not the case, we first check the headers, + and then stream the output - if the data size is bigger than we + requested, an exception is raised. + """ + logger.debug(f"Fetch range for {self}: {start}-{end}") + kwargs = self.kwargs.copy() + headers = kwargs.pop("headers", {}).copy() + headers["Range"] = f"bytes={start}-{end - 1}" + logger.debug(f"{self.url} : {headers['Range']}") + r = await self.session.get( + self.fs.encode_url(self.url), headers=headers, **kwargs + ) + async with r: + if r.status == 416: + # range request outside file + return b"" + r.raise_for_status() + + # If the server has handled the range request, it should reply + # with status 206 (partial content). But we'll guess that a suitable + # Content-Range header or a Content-Length no more than the + # requested range also mean we have got the desired range. 
+ response_is_range = ( + r.status == 206 + or self._parse_content_range(r.headers)[0] == start + or int(r.headers.get("Content-Length", end + 1)) <= end - start + ) + + if response_is_range: + # partial content, as expected + out = await r.read() + elif start > 0: + raise ValueError( + "The HTTP server doesn't appear to support range requests. " + "Only reading this file from the beginning is supported. " + "Open with block_size=0 for a streaming file interface." + ) + else: + # Response is not a range, but we want the start of the file, + # so we can read the required amount anyway. + cl = 0 + out = [] + while True: + chunk = await r.content.read(2**20) + # data size unknown, let's read until we have enough + if chunk: + out.append(chunk) + cl += len(chunk) + if cl > end - start: + break + else: + break + out = b"".join(out)[: end - start] + return out + + _fetch_range = sync_wrapper(async_fetch_range) + + +magic_check = re.compile("([*[])") + + +def has_magic(s): + match = magic_check.search(s) + return match is not None + + +class HTTPStreamFile(AbstractBufferedFile): + def __init__(self, fs, url, mode="rb", loop=None, session=None, **kwargs): + self.asynchronous = kwargs.pop("asynchronous", False) + self.url = url + self.loop = loop + self.session = session + if mode != "rb": + raise ValueError + self.details = {"name": url, "size": None} + super().__init__(fs=fs, path=url, mode=mode, cache_type="none", **kwargs) + + async def cor(): + r = await self.session.get(self.fs.encode_url(url), **kwargs).__aenter__() + self.fs._raise_not_found_for_status(r, url) + return r + + self.r = sync(self.loop, cor) + self.loop = fs.loop + + def seek(self, loc, whence=0): + if loc == 0 and whence == 1: + return + if loc == self.loc and whence == 0: + return + raise ValueError("Cannot seek streaming HTTP file") + + async def _read(self, num=-1): + out = await self.r.content.read(num) + self.loc += len(out) + return out + + read = sync_wrapper(_read) + + async def _close(self): + self.r.close() + + def close(self): + asyncio.run_coroutine_threadsafe(self._close(), self.loop) + super().close() + + +class AsyncStreamFile(AbstractAsyncStreamedFile): + def __init__( + self, fs, url, mode="rb", loop=None, session=None, size=None, **kwargs + ): + self.url = url + self.session = session + self.r = None + if mode != "rb": + raise ValueError + self.details = {"name": url, "size": None} + self.kwargs = kwargs + super().__init__(fs=fs, path=url, mode=mode, cache_type="none") + self.size = size + + async def read(self, num=-1): + if self.r is None: + r = await self.session.get( + self.fs.encode_url(self.url), **self.kwargs + ).__aenter__() + self.fs._raise_not_found_for_status(r, self.url) + self.r = r + out = await self.r.content.read(num) + self.loc += len(out) + return out + + async def close(self): + if self.r is not None: + self.r.close() + self.r = None + await super().close() + + +async def get_range(session, url, start, end, file=None, **kwargs): + # explicit get a range when we know it must be safe + kwargs = kwargs.copy() + headers = kwargs.pop("headers", {}).copy() + headers["Range"] = f"bytes={start}-{end - 1}" + r = await session.get(url, headers=headers, **kwargs) + r.raise_for_status() + async with r: + out = await r.read() + if file: + with open(file, "r+b") as f: # noqa: ASYNC230 + f.seek(start) + f.write(out) + else: + return out + + +async def _file_info(url, session, size_policy="head", **kwargs): + """Call HEAD on the server to get details about the file (size/checksum etc.) 
+
+    Default operation is to explicitly allow redirects and use encoding
+    'identity' (no compression) to get the true size of the target.
+    """
+    logger.debug("Retrieve file size for %s", url)
+    kwargs = kwargs.copy()
+    ar = kwargs.pop("allow_redirects", True)
+    head = kwargs.get("headers", {}).copy()
+    head["Accept-Encoding"] = "identity"
+    kwargs["headers"] = head
+
+    info = {}
+    if size_policy == "head":
+        r = await session.head(url, allow_redirects=ar, **kwargs)
+    elif size_policy == "get":
+        r = await session.get(url, allow_redirects=ar, **kwargs)
+    else:
+        raise TypeError(f'size_policy must be "head" or "get", got {size_policy}')
+    async with r:
+        r.raise_for_status()
+
+        if "Content-Length" in r.headers:
+            # Some servers may choose to ignore Accept-Encoding and return
+            # compressed content, in which case the returned size is unreliable.
+            if "Content-Encoding" not in r.headers or r.headers["Content-Encoding"] in [
+                "identity",
+                "",
+            ]:
+                info["size"] = int(r.headers["Content-Length"])
+        elif "Content-Range" in r.headers:
+            info["size"] = int(r.headers["Content-Range"].split("/")[1])
+
+        if "Content-Type" in r.headers:
+            info["mimetype"] = r.headers["Content-Type"].partition(";")[0]
+
+        if r.headers.get("Accept-Ranges") == "none":
+            # Some servers may explicitly discourage partial content requests, but
+            # the lack of "Accept-Ranges" does not always indicate they would fail
+            info["partial"] = False
+
+        info["url"] = str(r.url)
+
+        for checksum_field in ["ETag", "Content-MD5", "Digest"]:
+            if r.headers.get(checksum_field):
+                info[checksum_field] = r.headers[checksum_field]
+
+    return info
+
+
+async def _file_size(url, session=None, *args, **kwargs):
+    if session is None:
+        session = await get_client()
+    info = await _file_info(url, session=session, *args, **kwargs)
+    return info.get("size")
+
+
+file_size = sync_wrapper(_file_size)
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/http_sync.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/http_sync.py
new file mode 100644
index 0000000000000000000000000000000000000000..08799f20acde91b19dcdb4fbb02313bc64f9257a
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/http_sync.py
@@ -0,0 +1,931 @@
+"""This file is largely copied from http.py"""
+
+import io
+import logging
+import re
+import urllib.error
+import urllib.parse
+from copy import copy
+from json import dumps, loads
+from urllib.parse import urlparse
+
+try:
+    import yarl
+except (ImportError, ModuleNotFoundError, OSError):
+    yarl = False
+
+from fsspec.callbacks import _DEFAULT_CALLBACK
+from fsspec.registry import register_implementation
+from fsspec.spec import AbstractBufferedFile, AbstractFileSystem
+from fsspec.utils import DEFAULT_BLOCK_SIZE, isfilelike, nullcontext, tokenize
+
+from ..caching import AllBytes
+
+# https://stackoverflow.com/a/15926317/3821154
+ex = re.compile(r"""<(a|A)\s+(?:[^>]*?\s+)?(href|HREF)=["'](?P<url>[^"']+)""")
+ex2 = re.compile(r"""(?P<url>http[s]?://[-a-zA-Z0-9@:%_+.~#?&/=]+)""")
+logger = logging.getLogger("fsspec.http")
+
+
+class JsHttpException(urllib.error.HTTPError): ...
+
+
+class StreamIO(io.BytesIO):
+    # fake class, so you can set attributes on it
+    # will eventually actually stream
+    ...
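+
+# Minimal usage sketch (illustration only; the URL is a placeholder and this
+# snippet is not part of the module): ResponseProxy below wraps a completed
+# XMLHttpRequest so code written against the `requests` response API keeps
+# working under pyodide:
+#
+#     req = XMLHttpRequest.new()
+#     req.open("GET", "https://example.org/data.bin", False)  # synchronous XHR
+#     req.send(None)
+#     resp = ResponseProxy(req)
+#     resp.raise_for_status()
+#     body = resp.content  # bytes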
+
+
+class ResponseProxy:
+    """Looks like a requests response"""
+
+    def __init__(self, req, stream=False):
+        self.request = req
+        self.stream = stream
+        self._data = None
+        self._headers = None
+
+    @property
+    def raw(self):
+        if self._data is None:
+            b = self.request.response.to_bytes()
+            if self.stream:
+                self._data = StreamIO(b)
+            else:
+                self._data = b
+        return self._data
+
+    def close(self):
+        if hasattr(self, "_data"):
+            del self._data
+
+    @property
+    def headers(self):
+        if self._headers is None:
+            self._headers = dict(
+                [
+                    _.split(": ")
+                    for _ in self.request.getAllResponseHeaders().strip().split("\r\n")
+                ]
+            )
+        return self._headers
+
+    @property
+    def status_code(self):
+        return int(self.request.status)
+
+    def raise_for_status(self):
+        if not self.ok:
+            raise JsHttpException(
+                self.url, self.status_code, self.reason, self.headers, None
+            )
+
+    def iter_content(self, chunksize, *_, **__):
+        while True:
+            out = self.raw.read(chunksize)
+            if out:
+                yield out
+            else:
+                break
+
+    @property
+    def reason(self):
+        return self.request.statusText
+
+    @property
+    def ok(self):
+        return self.status_code < 400
+
+    @property
+    def url(self):
+        return self.request.response.responseURL
+
+    @property
+    def text(self):
+        # TODO: encoding from headers
+        return self.content.decode()
+
+    @property
+    def content(self):
+        self.stream = False
+        return self.raw
+
+    def json(self):
+        return loads(self.text)
+
+
+class RequestsSessionShim:
+    def __init__(self):
+        self.headers = {}
+
+    def request(
+        self,
+        method,
+        url,
+        params=None,
+        data=None,
+        headers=None,
+        cookies=None,
+        files=None,
+        auth=None,
+        timeout=None,
+        allow_redirects=None,
+        proxies=None,
+        hooks=None,
+        stream=None,
+        verify=None,
+        cert=None,
+        json=None,
+    ):
+        from js import Blob, XMLHttpRequest
+
+        logger.debug("JS request: %s %s", method, url)
+
+        if cert or verify or proxies or files or cookies or hooks:
+            raise NotImplementedError
+        if data and json:
+            raise ValueError("Use json= or data=, not both")
+        req = XMLHttpRequest.new()
+        extra = auth if auth else ()
+        if params:
+            url = f"{url}?{urllib.parse.urlencode(params)}"
+        req.open(method, url, False, *extra)
+        if timeout:
+            req.timeout = timeout
+        if headers:
+            for k, v in headers.items():
+                req.setRequestHeader(k, v)
+
+        req.setRequestHeader("Accept", "application/octet-stream")
+        req.responseType = "arraybuffer"
+        if json:
+            # serialise the json= payload (data is None in this branch)
+            blob = Blob.new([dumps(json)], {type: "application/json"})
+            req.send(blob)
+        elif data:
+            if isinstance(data, io.IOBase):
+                data = data.read()
+            blob = Blob.new([data], {type: "application/octet-stream"})
+            req.send(blob)
+        else:
+            req.send(None)
+        return ResponseProxy(req, stream=stream)
+
+    def get(self, url, **kwargs):
+        return self.request("GET", url, **kwargs)
+
+    def head(self, url, **kwargs):
+        return self.request("HEAD", url, **kwargs)
+
+    def post(self, url, **kwargs):
+        return self.request("POST", url, **kwargs)
+
+    def put(self, url, **kwargs):
+        return self.request("PUT", url, **kwargs)
+
+    def patch(self, url, **kwargs):
+        return self.request("PATCH", url, **kwargs)
+
+    def delete(self, url, **kwargs):
+        return self.request("DELETE", url, **kwargs)
+
+
+class HTTPFileSystem(AbstractFileSystem):
+    """
+    Simple File-System for fetching data via HTTP(S)
+
+    This is the BLOCKING version of the normal HTTPFileSystem. It uses
+    requests in normal python and the JS runtime in pyodide.
+ + ***This implementation is extremely experimental, do not use unless + you are testing pyodide/pyscript integration*** + """ + + protocol = ("http", "https", "sync-http", "sync-https") + sep = "/" + + def __init__( + self, + simple_links=True, + block_size=None, + same_scheme=True, + cache_type="readahead", + cache_options=None, + client_kwargs=None, + encoded=False, + **storage_options, + ): + """ + + Parameters + ---------- + block_size: int + Blocks to read bytes; if 0, will default to raw requests file-like + objects instead of HTTPFile instances + simple_links: bool + If True, will consider both HTML tags and anything that looks + like a URL; if False, will consider only the former. + same_scheme: True + When doing ls/glob, if this is True, only consider paths that have + http/https matching the input URLs. + size_policy: this argument is deprecated + client_kwargs: dict + Passed to aiohttp.ClientSession, see + https://docs.aiohttp.org/en/stable/client_reference.html + For example, ``{'auth': aiohttp.BasicAuth('user', 'pass')}`` + storage_options: key-value + Any other parameters passed on to requests + cache_type, cache_options: defaults used in open + """ + super().__init__(self, **storage_options) + self.block_size = block_size if block_size is not None else DEFAULT_BLOCK_SIZE + self.simple_links = simple_links + self.same_schema = same_scheme + self.cache_type = cache_type + self.cache_options = cache_options + self.client_kwargs = client_kwargs or {} + self.encoded = encoded + self.kwargs = storage_options + + try: + import js # noqa: F401 + + logger.debug("Starting JS session") + self.session = RequestsSessionShim() + self.js = True + except Exception as e: + import requests + + logger.debug("Starting cpython session because of: %s", e) + self.session = requests.Session(**(client_kwargs or {})) + self.js = False + + request_options = copy(storage_options) + self.use_listings_cache = request_options.pop("use_listings_cache", False) + request_options.pop("listings_expiry_time", None) + request_options.pop("max_paths", None) + request_options.pop("skip_instance_cache", None) + self.kwargs = request_options + + @property + def fsid(self): + return "sync-http" + + def encode_url(self, url): + if yarl: + return yarl.URL(url, encoded=self.encoded) + return url + + @classmethod + def _strip_protocol(cls, path: str) -> str: + """For HTTP, we always want to keep the full URL""" + path = path.replace("sync-http://", "http://").replace( + "sync-https://", "https://" + ) + return path + + @classmethod + def _parent(cls, path): + # override, since _strip_protocol is different for URLs + par = super()._parent(path) + if len(par) > 7: # "http://..." 
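+            # assumption: a parent longer than the bare scheme ("http://" is
+            # 7 characters) still names a host, so it is a usable parent URL;
+            # anything shorter means we have reached the server root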
+ return par + return "" + + def _ls_real(self, url, detail=True, **kwargs): + # ignoring URL-encoded arguments + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(url) + r = self.session.get(self.encode_url(url), **self.kwargs) + self._raise_not_found_for_status(r, url) + text = r.text + if self.simple_links: + links = ex2.findall(text) + [u[2] for u in ex.findall(text)] + else: + links = [u[2] for u in ex.findall(text)] + out = set() + parts = urlparse(url) + for l in links: + if isinstance(l, tuple): + l = l[1] + if l.startswith("/") and len(l) > 1: + # absolute URL on this server + l = parts.scheme + "://" + parts.netloc + l + if l.startswith("http"): + if self.same_schema and l.startswith(url.rstrip("/") + "/"): + out.add(l) + elif l.replace("https", "http").startswith( + url.replace("https", "http").rstrip("/") + "/" + ): + # allowed to cross http <-> https + out.add(l) + else: + if l not in ["..", "../"]: + # Ignore FTP-like "parent" + out.add("/".join([url.rstrip("/"), l.lstrip("/")])) + if not out and url.endswith("/"): + out = self._ls_real(url.rstrip("/"), detail=False) + if detail: + return [ + { + "name": u, + "size": None, + "type": "directory" if u.endswith("/") else "file", + } + for u in out + ] + else: + return sorted(out) + + def ls(self, url, detail=True, **kwargs): + if self.use_listings_cache and url in self.dircache: + out = self.dircache[url] + else: + out = self._ls_real(url, detail=detail, **kwargs) + self.dircache[url] = out + return out + + def _raise_not_found_for_status(self, response, url): + """ + Raises FileNotFoundError for 404s, otherwise uses raise_for_status. + """ + if response.status_code == 404: + raise FileNotFoundError(url) + response.raise_for_status() + + def cat_file(self, url, start=None, end=None, **kwargs): + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(url) + + if start is not None or end is not None: + if start == end: + return b"" + headers = kw.pop("headers", {}).copy() + + headers["Range"] = self._process_limits(url, start, end) + kw["headers"] = headers + r = self.session.get(self.encode_url(url), **kw) + self._raise_not_found_for_status(r, url) + return r.content + + def get_file( + self, rpath, lpath, chunk_size=5 * 2**20, callback=_DEFAULT_CALLBACK, **kwargs + ): + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(rpath) + r = self.session.get(self.encode_url(rpath), **kw) + try: + size = int( + r.headers.get("content-length", None) + or r.headers.get("Content-Length", None) + ) + except (ValueError, KeyError, TypeError): + size = None + + callback.set_size(size) + self._raise_not_found_for_status(r, rpath) + if not isfilelike(lpath): + lpath = open(lpath, "wb") + for chunk in r.iter_content(chunk_size, decode_unicode=False): + lpath.write(chunk) + callback.relative_update(len(chunk)) + + def put_file( + self, + lpath, + rpath, + chunk_size=5 * 2**20, + callback=_DEFAULT_CALLBACK, + method="post", + **kwargs, + ): + def gen_chunks(): + # Support passing arbitrary file-like objects + # and use them instead of streams. 
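+            # The generator below yields the local file in chunk_size pieces,
+            # updating the progress callback after each chunk, so the upload
+            # is streamed rather than held in memory.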
+ if isinstance(lpath, io.IOBase): + context = nullcontext(lpath) + use_seek = False # might not support seeking + else: + context = open(lpath, "rb") + use_seek = True + + with context as f: + if use_seek: + callback.set_size(f.seek(0, 2)) + f.seek(0) + else: + callback.set_size(getattr(f, "size", None)) + + chunk = f.read(chunk_size) + while chunk: + yield chunk + callback.relative_update(len(chunk)) + chunk = f.read(chunk_size) + + kw = self.kwargs.copy() + kw.update(kwargs) + + method = method.lower() + if method not in ("post", "put"): + raise ValueError( + f"method has to be either 'post' or 'put', not: {method!r}" + ) + + meth = getattr(self.session, method) + resp = meth(rpath, data=gen_chunks(), **kw) + self._raise_not_found_for_status(resp, rpath) + + def _process_limits(self, url, start, end): + """Helper for "Range"-based _cat_file""" + size = None + suff = False + if start is not None and start < 0: + # if start is negative and end None, end is the "suffix length" + if end is None: + end = -start + start = "" + suff = True + else: + size = size or self.info(url)["size"] + start = size + start + elif start is None: + start = 0 + if not suff: + if end is not None and end < 0: + if start is not None: + size = size or self.info(url)["size"] + end = size + end + elif end is None: + end = "" + if isinstance(end, int): + end -= 1 # bytes range is inclusive + return f"bytes={start}-{end}" + + def exists(self, path, **kwargs): + kw = self.kwargs.copy() + kw.update(kwargs) + try: + logger.debug(path) + r = self.session.get(self.encode_url(path), **kw) + return r.status_code < 400 + except Exception: + return False + + def isfile(self, path, **kwargs): + return self.exists(path, **kwargs) + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=None, # XXX: This differs from the base class. + cache_type=None, + cache_options=None, + size=None, + **kwargs, + ): + """Make a file-like object + + Parameters + ---------- + path: str + Full URL with protocol + mode: string + must be "rb" + block_size: int or None + Bytes to download in one request; use instance value if None. If + zero, will return a streaming Requests file-like instance. + kwargs: key-value + Any other parameters, passed to requests calls + """ + if mode != "rb": + raise NotImplementedError + block_size = block_size if block_size is not None else self.block_size + kw = self.kwargs.copy() + kw.update(kwargs) + size = size or self.info(path, **kwargs)["size"] + if block_size and size: + return HTTPFile( + self, + path, + session=self.session, + block_size=block_size, + mode=mode, + size=size, + cache_type=cache_type or self.cache_type, + cache_options=cache_options or self.cache_options, + **kw, + ) + else: + return HTTPStreamFile( + self, + path, + mode=mode, + session=self.session, + **kw, + ) + + def ukey(self, url): + """Unique identifier; assume HTTP files are static, unchanging""" + return tokenize(url, self.kwargs, self.protocol) + + def info(self, url, **kwargs): + """Get info of URL + + Tries to access location via HEAD, and then GET methods, but does + not fetch the data. + + It is possible that the server does not supply any size information, in + which case size will be given as None (and certain operations on the + corresponding file will not work). 
+ """ + info = {} + for policy in ["head", "get"]: + try: + info.update( + _file_info( + self.encode_url(url), + size_policy=policy, + session=self.session, + **self.kwargs, + **kwargs, + ) + ) + if info.get("size") is not None: + break + except Exception as exc: + if policy == "get": + # If get failed, then raise a FileNotFoundError + raise FileNotFoundError(url) from exc + logger.debug(str(exc)) + + return {"name": url, "size": None, **info, "type": "file"} + + def glob(self, path, maxdepth=None, **kwargs): + """ + Find files by glob-matching. + + This implementation is idntical to the one in AbstractFileSystem, + but "?" is not considered as a character for globbing, because it is + so common in URLs, often identifying the "query" part. + """ + import re + + ends = path.endswith("/") + path = self._strip_protocol(path) + indstar = path.find("*") if path.find("*") >= 0 else len(path) + indbrace = path.find("[") if path.find("[") >= 0 else len(path) + + ind = min(indstar, indbrace) + + detail = kwargs.pop("detail", False) + + if not has_magic(path): + root = path + depth = 1 + if ends: + path += "/*" + elif self.exists(path): + if not detail: + return [path] + else: + return {path: self.info(path)} + else: + if not detail: + return [] # glob of non-existent returns empty + else: + return {} + elif "/" in path[:ind]: + ind2 = path[:ind].rindex("/") + root = path[: ind2 + 1] + depth = None if "**" in path else path[ind2 + 1 :].count("/") + 1 + else: + root = "" + depth = None if "**" in path else path[ind + 1 :].count("/") + 1 + + allpaths = self.find( + root, maxdepth=maxdepth or depth, withdirs=True, detail=True, **kwargs + ) + # Escape characters special to python regex, leaving our supported + # special characters in place. + # See https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html + # for shell globbing details. + pattern = ( + "^" + + ( + path.replace("\\", r"\\") + .replace(".", r"\.") + .replace("+", r"\+") + .replace("//", "/") + .replace("(", r"\(") + .replace(")", r"\)") + .replace("|", r"\|") + .replace("^", r"\^") + .replace("$", r"\$") + .replace("{", r"\{") + .replace("}", r"\}") + .rstrip("/") + ) + + "$" + ) + pattern = re.sub("[*]{2}", "=PLACEHOLDER=", pattern) + pattern = re.sub("[*]", "[^/]*", pattern) + pattern = re.compile(pattern.replace("=PLACEHOLDER=", ".*")) + out = { + p: allpaths[p] + for p in sorted(allpaths) + if pattern.match(p.replace("//", "/").rstrip("/")) + } + if detail: + return out + else: + return list(out) + + def isdir(self, path): + # override, since all URLs are (also) files + try: + return bool(self.ls(path)) + except (FileNotFoundError, ValueError): + return False + + +class HTTPFile(AbstractBufferedFile): + """ + A file-like object pointing to a remove HTTP(S) resource + + Supports only reading, with read-ahead of a predermined block-size. + + In the case that the server does not supply the filesize, only reading of + the complete file in one go is supported. + + Parameters + ---------- + url: str + Full URL of the remote resource, including the protocol + session: requests.Session or None + All calls will be made within this session, to avoid restarting + connections where the server allows this + block_size: int or None + The amount of read-ahead to do, in bytes. Default is 5MB, or the value + configured for the FileSystem creating this file + size: None or int + If given, this is the size of the file in bytes, and we don't attempt + to call the server to find the value. 
+ kwargs: all other key-values are passed to requests calls. + """ + + def __init__( + self, + fs, + url, + session=None, + block_size=None, + mode="rb", + cache_type="bytes", + cache_options=None, + size=None, + **kwargs, + ): + if mode != "rb": + raise NotImplementedError("File mode not supported") + self.url = url + self.session = session + self.details = {"name": url, "size": size, "type": "file"} + super().__init__( + fs=fs, + path=url, + mode=mode, + block_size=block_size, + cache_type=cache_type, + cache_options=cache_options, + **kwargs, + ) + + def read(self, length=-1): + """Read bytes from file + + Parameters + ---------- + length: int + Read up to this many bytes. If negative, read all content to end of + file. If the server has not supplied the filesize, attempting to + read only part of the data will raise a ValueError. + """ + if ( + (length < 0 and self.loc == 0) # explicit read all + # but not when the size is known and fits into a block anyways + and not (self.size is not None and self.size <= self.blocksize) + ): + self._fetch_all() + if self.size is None: + if length < 0: + self._fetch_all() + else: + length = min(self.size - self.loc, length) + return super().read(length) + + def _fetch_all(self): + """Read whole file in one shot, without caching + + This is only called when position is still at zero, + and read() is called without a byte-count. + """ + logger.debug(f"Fetch all for {self}") + if not isinstance(self.cache, AllBytes): + r = self.session.get(self.fs.encode_url(self.url), **self.kwargs) + r.raise_for_status() + out = r.content + self.cache = AllBytes(size=len(out), fetcher=None, blocksize=None, data=out) + self.size = len(out) + + def _parse_content_range(self, headers): + """Parse the Content-Range header""" + s = headers.get("Content-Range", "") + m = re.match(r"bytes (\d+-\d+|\*)/(\d+|\*)", s) + if not m: + return None, None, None + + if m[1] == "*": + start = end = None + else: + start, end = [int(x) for x in m[1].split("-")] + total = None if m[2] == "*" else int(m[2]) + return start, end, total + + def _fetch_range(self, start, end): + """Download a block of data + + The expectation is that the server returns only the requested bytes, + with HTTP code 206. If this is not the case, we first check the headers, + and then stream the output - if the data size is bigger than we + requested, an exception is raised. + """ + logger.debug(f"Fetch range for {self}: {start}-{end}") + kwargs = self.kwargs.copy() + headers = kwargs.pop("headers", {}).copy() + headers["Range"] = f"bytes={start}-{end - 1}" + logger.debug("%s : %s", self.url, headers["Range"]) + r = self.session.get(self.fs.encode_url(self.url), headers=headers, **kwargs) + if r.status_code == 416: + # range request outside file + return b"" + r.raise_for_status() + + # If the server has handled the range request, it should reply + # with status 206 (partial content). But we'll guess that a suitable + # Content-Range header or a Content-Length no more than the + # requested range also mean we have got the desired range. + cl = r.headers.get("Content-Length", r.headers.get("content-length", end + 1)) + response_is_range = ( + r.status_code == 206 + or self._parse_content_range(r.headers)[0] == start + or int(cl) <= end - start + ) + + if response_is_range: + # partial content, as expected + out = r.content + elif start > 0: + raise ValueError( + "The HTTP server doesn't appear to support range requests. " + "Only reading this file from the beginning is supported. 
" + "Open with block_size=0 for a streaming file interface." + ) + else: + # Response is not a range, but we want the start of the file, + # so we can read the required amount anyway. + cl = 0 + out = [] + for chunk in r.iter_content(2**20, False): + out.append(chunk) + cl += len(chunk) + out = b"".join(out)[: end - start] + return out + + +magic_check = re.compile("([*[])") + + +def has_magic(s): + match = magic_check.search(s) + return match is not None + + +class HTTPStreamFile(AbstractBufferedFile): + def __init__(self, fs, url, mode="rb", session=None, **kwargs): + self.url = url + self.session = session + if mode != "rb": + raise ValueError + self.details = {"name": url, "size": None} + super().__init__(fs=fs, path=url, mode=mode, cache_type="readahead", **kwargs) + + r = self.session.get(self.fs.encode_url(url), stream=True, **kwargs) + self.fs._raise_not_found_for_status(r, url) + self.it = r.iter_content(1024, False) + self.leftover = b"" + + self.r = r + + def seek(self, *args, **kwargs): + raise ValueError("Cannot seek streaming HTTP file") + + def read(self, num=-1): + bufs = [self.leftover] + leng = len(self.leftover) + while leng < num or num < 0: + try: + out = self.it.__next__() + except StopIteration: + break + if out: + bufs.append(out) + else: + break + leng += len(out) + out = b"".join(bufs) + if num >= 0: + self.leftover = out[num:] + out = out[:num] + else: + self.leftover = b"" + self.loc += len(out) + return out + + def close(self): + self.r.close() + self.closed = True + + +def get_range(session, url, start, end, **kwargs): + # explicit get a range when we know it must be safe + kwargs = kwargs.copy() + headers = kwargs.pop("headers", {}).copy() + headers["Range"] = f"bytes={start}-{end - 1}" + r = session.get(url, headers=headers, **kwargs) + r.raise_for_status() + return r.content + + +def _file_info(url, session, size_policy="head", **kwargs): + """Call HEAD on the server to get details about the file (size/checksum etc.) + + Default operation is to explicitly allow redirects and use encoding + 'identity' (no compression) to get the true size of the target. 
+ """ + logger.debug("Retrieve file size for %s", url) + kwargs = kwargs.copy() + ar = kwargs.pop("allow_redirects", True) + head = kwargs.get("headers", {}).copy() + # TODO: not allowed in JS + # head["Accept-Encoding"] = "identity" + kwargs["headers"] = head + + info = {} + if size_policy == "head": + r = session.head(url, allow_redirects=ar, **kwargs) + elif size_policy == "get": + r = session.get(url, allow_redirects=ar, **kwargs) + else: + raise TypeError(f'size_policy must be "head" or "get", got {size_policy}') + r.raise_for_status() + + # TODO: + # recognise lack of 'Accept-Ranges', + # or 'Accept-Ranges': 'none' (not 'bytes') + # to mean streaming only, no random access => return None + if "Content-Length" in r.headers: + info["size"] = int(r.headers["Content-Length"]) + elif "Content-Range" in r.headers: + info["size"] = int(r.headers["Content-Range"].split("/")[1]) + elif "content-length" in r.headers: + info["size"] = int(r.headers["content-length"]) + elif "content-range" in r.headers: + info["size"] = int(r.headers["content-range"].split("/")[1]) + + for checksum_field in ["ETag", "Content-MD5", "Digest"]: + if r.headers.get(checksum_field): + info[checksum_field] = r.headers[checksum_field] + + return info + + +# importing this is enough to register it +def register(): + register_implementation("http", HTTPFileSystem, clobber=True) + register_implementation("https", HTTPFileSystem, clobber=True) + register_implementation("sync-http", HTTPFileSystem, clobber=True) + register_implementation("sync-https", HTTPFileSystem, clobber=True) + + +register() + + +def unregister(): + from fsspec.implementations.http import HTTPFileSystem + + register_implementation("http", HTTPFileSystem, clobber=True) + register_implementation("https", HTTPFileSystem, clobber=True) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/jupyter.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/jupyter.py new file mode 100644 index 0000000000000000000000000000000000000000..2839f4c1feea56dddd54bdc00f0b884c8461d29e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/jupyter.py @@ -0,0 +1,124 @@ +import base64 +import io +import re + +import requests + +import fsspec + + +class JupyterFileSystem(fsspec.AbstractFileSystem): + """View of the files as seen by a Jupyter server (notebook or lab)""" + + protocol = ("jupyter", "jlab") + + def __init__(self, url, tok=None, **kwargs): + """ + + Parameters + ---------- + url : str + Base URL of the server, like "http://127.0.0.1:8888". May include + token in the string, which is given by the process when starting up + tok : str + If the token is obtained separately, can be given here + kwargs + """ + if "?" 
in url:
+            if tok is None:
+                try:
+                    tok = re.findall("token=([a-z0-9]+)", url)[0]
+                except IndexError as e:
+                    raise ValueError("Could not determine token") from e
+            url = url.split("?", 1)[0]
+        self.url = url.rstrip("/") + "/api/contents"
+        self.session = requests.Session()
+        if tok:
+            self.session.headers["Authorization"] = f"token {tok}"
+
+        super().__init__(**kwargs)
+
+    def ls(self, path, detail=True, **kwargs):
+        path = self._strip_protocol(path)
+        r = self.session.get(f"{self.url}/{path}")
+        if r.status_code == 404:
+            raise FileNotFoundError(path)
+        r.raise_for_status()
+        out = r.json()
+
+        if out["type"] == "directory":
+            out = out["content"]
+        else:
+            out = [out]
+        for o in out:
+            o["name"] = o.pop("path")
+            o.pop("content")
+            if o["type"] == "notebook":
+                o["type"] = "file"
+        if detail:
+            return out
+        return [o["name"] for o in out]
+
+    def cat_file(self, path, start=None, end=None, **kwargs):
+        path = self._strip_protocol(path)
+        r = self.session.get(f"{self.url}/{path}")
+        if r.status_code == 404:
+            raise FileNotFoundError(path)
+        r.raise_for_status()
+        out = r.json()
+        if out["format"] == "text":
+            # data should be binary
+            b = out["content"].encode()
+        else:
+            b = base64.b64decode(out["content"])
+        return b[start:end]
+
+    def pipe_file(self, path, value, **_):
+        path = self._strip_protocol(path)
+        json = {
+            "name": path.rsplit("/", 1)[-1],
+            "path": path,
+            "size": len(value),
+            "content": base64.b64encode(value).decode(),
+            "format": "base64",
+            "type": "file",
+        }
+        self.session.put(f"{self.url}/{path}", json=json)
+
+    def mkdir(self, path, create_parents=True, **kwargs):
+        path = self._strip_protocol(path)
+        if create_parents and "/" in path:
+            self.mkdir(path.rsplit("/", 1)[0], True)
+        json = {
+            "name": path.rsplit("/", 1)[-1],
+            "path": path,
+            "size": None,
+            "content": None,
+            "type": "directory",
+        }
+        self.session.put(f"{self.url}/{path}", json=json)
+
+    def _rm(self, path):
+        path = self._strip_protocol(path)
+        self.session.delete(f"{self.url}/{path}")
+
+    def _open(self, path, mode="rb", **kwargs):
+        path = self._strip_protocol(path)
+        if mode == "rb":
+            data = self.cat_file(path)
+            return io.BytesIO(data)
+        else:
+            return SimpleFileWriter(self, path, mode="wb")
+
+
+class SimpleFileWriter(fsspec.spec.AbstractBufferedFile):
+    def _upload_chunk(self, final=False):
+        """Never uploads a chunk until file is done
+
+        Not suitable for large files
+        """
+        if final is False:
+            return False
+        self.buffer.seek(0)
+        data = self.buffer.read()
+        self.fs.pipe_file(self.path, data)
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/libarchive.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/libarchive.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb6f145352e1989e0477e259be02d8d7f4d729e2
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/libarchive.py
@@ -0,0 +1,213 @@
+from contextlib import contextmanager
+from ctypes import (
+    CFUNCTYPE,
+    POINTER,
+    c_int,
+    c_longlong,
+    c_void_p,
+    cast,
+    create_string_buffer,
+)
+
+import libarchive
+import libarchive.ffi as ffi
+
+from fsspec import open_files
+from fsspec.archive import AbstractArchiveFileSystem
+from fsspec.implementations.memory import MemoryFile
+from fsspec.utils import DEFAULT_BLOCK_SIZE
+
+# Libarchive requires seekable files or memory only for certain archive
+# types.
However, since we read the directory first to cache the contents +# and also allow random access to any file, the file-like object needs +# to be seekable no matter what. + +# Seek call-backs (not provided in the libarchive python wrapper) +SEEK_CALLBACK = CFUNCTYPE(c_longlong, c_int, c_void_p, c_longlong, c_int) +read_set_seek_callback = ffi.ffi( + "read_set_seek_callback", [ffi.c_archive_p, SEEK_CALLBACK], c_int, ffi.check_int +) +new_api = hasattr(ffi, "NO_OPEN_CB") + + +@contextmanager +def custom_reader(file, format_name="all", filter_name="all", block_size=ffi.page_size): + """Read an archive from a seekable file-like object. + + The `file` object must support the standard `readinto` and 'seek' methods. + """ + buf = create_string_buffer(block_size) + buf_p = cast(buf, c_void_p) + + def read_func(archive_p, context, ptrptr): + # readinto the buffer, returns number of bytes read + length = file.readinto(buf) + # write the address of the buffer into the pointer + ptrptr = cast(ptrptr, POINTER(c_void_p)) + ptrptr[0] = buf_p + # tell libarchive how much data was written into the buffer + return length + + def seek_func(archive_p, context, offset, whence): + file.seek(offset, whence) + # tell libarchvie the current position + return file.tell() + + read_cb = ffi.READ_CALLBACK(read_func) + seek_cb = SEEK_CALLBACK(seek_func) + + if new_api: + open_cb = ffi.NO_OPEN_CB + close_cb = ffi.NO_CLOSE_CB + else: + open_cb = libarchive.read.OPEN_CALLBACK(ffi.VOID_CB) + close_cb = libarchive.read.CLOSE_CALLBACK(ffi.VOID_CB) + + with libarchive.read.new_archive_read(format_name, filter_name) as archive_p: + read_set_seek_callback(archive_p, seek_cb) + ffi.read_open(archive_p, None, open_cb, read_cb, close_cb) + yield libarchive.read.ArchiveRead(archive_p) + + +class LibArchiveFileSystem(AbstractArchiveFileSystem): + """Compressed archives as a file-system (read-only) + + Supports the following formats: + tar, pax , cpio, ISO9660, zip, mtree, shar, ar, raw, xar, lha/lzh, rar + Microsoft CAB, 7-Zip, WARC + + See the libarchive documentation for further restrictions. + https://www.libarchive.org/ + + Keeps file object open while instance lives. It only works in seekable + file-like objects. In case the filesystem does not support this kind of + file object, it is recommended to cache locally. + + This class is pickleable, but not necessarily thread-safe (depends on the + platform). See libarchive documentation for details. + """ + + root_marker = "" + protocol = "libarchive" + cachable = False + + def __init__( + self, + fo="", + mode="r", + target_protocol=None, + target_options=None, + block_size=DEFAULT_BLOCK_SIZE, + **kwargs, + ): + """ + Parameters + ---------- + fo: str or file-like + Contains ZIP, and must exist. If a str, will fetch file using + :meth:`~fsspec.open_files`, which must return one file exactly. + mode: str + Currently, only 'r' accepted + target_protocol: str (optional) + If ``fo`` is a string, this value can be used to override the + FS protocol inferred from a URL + target_options: dict (optional) + Kwargs passed when instantiating the target FS, if ``fo`` is + a string. 
+ """ + super().__init__(self, **kwargs) + if mode != "r": + raise ValueError("Only read from archive files accepted") + if isinstance(fo, str): + files = open_files(fo, protocol=target_protocol, **(target_options or {})) + if len(files) != 1: + raise ValueError( + f'Path "{fo}" did not resolve to exactly one file: "{files}"' + ) + fo = files[0] + self.of = fo + self.fo = fo.__enter__() # the whole instance is a context + self.block_size = block_size + self.dir_cache = None + + @contextmanager + def _open_archive(self): + self.fo.seek(0) + with custom_reader(self.fo, block_size=self.block_size) as arc: + yield arc + + @classmethod + def _strip_protocol(cls, path): + # file paths are always relative to the archive root + return super()._strip_protocol(path).lstrip("/") + + def _get_dirs(self): + fields = { + "name": "pathname", + "size": "size", + "created": "ctime", + "mode": "mode", + "uid": "uid", + "gid": "gid", + "mtime": "mtime", + } + + if self.dir_cache is not None: + return + + self.dir_cache = {} + list_names = [] + with self._open_archive() as arc: + for entry in arc: + if not entry.isdir and not entry.isfile: + # Skip symbolic links, fifo entries, etc. + continue + self.dir_cache.update( + { + dirname: {"name": dirname, "size": 0, "type": "directory"} + for dirname in self._all_dirnames(set(entry.name)) + } + ) + f = {key: getattr(entry, fields[key]) for key in fields} + f["type"] = "directory" if entry.isdir else "file" + list_names.append(entry.name) + + self.dir_cache[f["name"]] = f + # libarchive does not seem to return an entry for the directories (at least + # not in all formats), so get the directories names from the files names + self.dir_cache.update( + { + dirname: {"name": dirname, "size": 0, "type": "directory"} + for dirname in self._all_dirnames(list_names) + } + ) + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + path = self._strip_protocol(path) + if mode != "rb": + raise NotImplementedError + + data = bytes() + with self._open_archive() as arc: + for entry in arc: + if entry.pathname != path: + continue + + if entry.size == 0: + # empty file, so there are no blocks + break + + for block in entry.get_blocks(entry.size): + data = block + break + else: + raise ValueError + return MemoryFile(fs=self, path=path, data=data) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/local.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/local.py new file mode 100644 index 0000000000000000000000000000000000000000..64dc8bb21956e9c5024b47d46d9e9550931891d8 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/local.py @@ -0,0 +1,514 @@ +import datetime +import io +import logging +import os +import os.path as osp +import shutil +import stat +import tempfile +from functools import lru_cache + +from fsspec import AbstractFileSystem +from fsspec.compression import compr +from fsspec.core import get_compression +from fsspec.utils import isfilelike, stringify_path + +logger = logging.getLogger("fsspec.local") + + +class LocalFileSystem(AbstractFileSystem): + """Interface to files on local storage + + Parameters + ---------- + auto_mkdir: bool + Whether, when opening a file, the directory containing it should + be created (if it doesn't already exist). This is assumed by pyarrow + code. 
+ """ + + root_marker = "/" + protocol = "file", "local" + local_file = True + + def __init__(self, auto_mkdir=False, **kwargs): + super().__init__(**kwargs) + self.auto_mkdir = auto_mkdir + + @property + def fsid(self): + return "local" + + def mkdir(self, path, create_parents=True, **kwargs): + path = self._strip_protocol(path) + if self.exists(path): + raise FileExistsError(path) + if create_parents: + self.makedirs(path, exist_ok=True) + else: + os.mkdir(path, **kwargs) + + def makedirs(self, path, exist_ok=False): + path = self._strip_protocol(path) + os.makedirs(path, exist_ok=exist_ok) + + def rmdir(self, path): + path = self._strip_protocol(path) + os.rmdir(path) + + def ls(self, path, detail=False, **kwargs): + path = self._strip_protocol(path) + path_info = self.info(path) + infos = [] + if path_info["type"] == "directory": + with os.scandir(path) as it: + for f in it: + try: + # Only get the info if requested since it is a bit expensive (the stat call inside) + # The strip_protocol is also used in info() and calls make_path_posix to always return posix paths + info = self.info(f) if detail else self._strip_protocol(f.path) + infos.append(info) + except FileNotFoundError: + pass + else: + infos = [path_info] if detail else [path_info["name"]] + + return infos + + def info(self, path, **kwargs): + if isinstance(path, os.DirEntry): + # scandir DirEntry + out = path.stat(follow_symlinks=False) + link = path.is_symlink() + if path.is_dir(follow_symlinks=False): + t = "directory" + elif path.is_file(follow_symlinks=False): + t = "file" + else: + t = "other" + + size = out.st_size + if link: + try: + out2 = path.stat(follow_symlinks=True) + size = out2.st_size + except OSError: + size = 0 + path = self._strip_protocol(path.path) + else: + # str or path-like + path = self._strip_protocol(path) + out = os.stat(path, follow_symlinks=False) + link = stat.S_ISLNK(out.st_mode) + if link: + out = os.stat(path, follow_symlinks=True) + size = out.st_size + if stat.S_ISDIR(out.st_mode): + t = "directory" + elif stat.S_ISREG(out.st_mode): + t = "file" + else: + t = "other" + + # Check for the 'st_birthtime' attribute, which is not always present; fallback to st_ctime + created_time = getattr(out, "st_birthtime", out.st_ctime) + + result = { + "name": path, + "size": size, + "type": t, + "created": created_time, + "islink": link, + } + for field in ["mode", "uid", "gid", "mtime", "ino", "nlink"]: + result[field] = getattr(out, f"st_{field}") + if link: + result["destination"] = os.readlink(path) + return result + + def lexists(self, path, **kwargs): + return osp.lexists(path) + + def cp_file(self, path1, path2, **kwargs): + path1 = self._strip_protocol(path1) + path2 = self._strip_protocol(path2) + if self.auto_mkdir: + self.makedirs(self._parent(path2), exist_ok=True) + if self.isfile(path1): + shutil.copyfile(path1, path2) + elif self.isdir(path1): + self.mkdirs(path2, exist_ok=True) + else: + raise FileNotFoundError(path1) + + def isfile(self, path): + path = self._strip_protocol(path) + return os.path.isfile(path) + + def isdir(self, path): + path = self._strip_protocol(path) + return os.path.isdir(path) + + def get_file(self, path1, path2, callback=None, **kwargs): + if isfilelike(path2): + with open(path1, "rb") as f: + shutil.copyfileobj(f, path2) + else: + return self.cp_file(path1, path2, **kwargs) + + def put_file(self, path1, path2, callback=None, **kwargs): + return self.cp_file(path1, path2, **kwargs) + + def mv(self, path1, path2, recursive: bool = True, **kwargs): + """Move 
files/directories + For the specific case of local, all ops on directories are recursive and + the recursive= kwarg is ignored. + """ + path1 = self._strip_protocol(path1) + path2 = self._strip_protocol(path2) + shutil.move(path1, path2) + + def link(self, src, dst, **kwargs): + src = self._strip_protocol(src) + dst = self._strip_protocol(dst) + os.link(src, dst, **kwargs) + + def symlink(self, src, dst, **kwargs): + src = self._strip_protocol(src) + dst = self._strip_protocol(dst) + os.symlink(src, dst, **kwargs) + + def islink(self, path) -> bool: + return os.path.islink(self._strip_protocol(path)) + + def rm_file(self, path): + os.remove(self._strip_protocol(path)) + + def rm(self, path, recursive=False, maxdepth=None): + if not isinstance(path, list): + path = [path] + + for p in path: + p = self._strip_protocol(p) + if self.isdir(p): + if not recursive: + raise ValueError("Cannot delete directory, set recursive=True") + if osp.abspath(p) == os.getcwd(): + raise ValueError("Cannot delete current working directory") + shutil.rmtree(p) + else: + os.remove(p) + + def unstrip_protocol(self, name): + name = self._strip_protocol(name) # normalise for local/win/... + return f"file://{name}" + + def _open(self, path, mode="rb", block_size=None, **kwargs): + path = self._strip_protocol(path) + if self.auto_mkdir and "w" in mode: + self.makedirs(self._parent(path), exist_ok=True) + return LocalFileOpener(path, mode, fs=self, **kwargs) + + def touch(self, path, truncate=True, **kwargs): + path = self._strip_protocol(path) + if self.auto_mkdir: + self.makedirs(self._parent(path), exist_ok=True) + if self.exists(path): + os.utime(path, None) + else: + open(path, "a").close() + if truncate: + os.truncate(path, 0) + + def created(self, path): + info = self.info(path=path) + return datetime.datetime.fromtimestamp( + info["created"], tz=datetime.timezone.utc + ) + + def modified(self, path): + info = self.info(path=path) + return datetime.datetime.fromtimestamp(info["mtime"], tz=datetime.timezone.utc) + + @classmethod + def _parent(cls, path): + path = cls._strip_protocol(path) + if os.sep == "/": + # posix native + return path.rsplit("/", 1)[0] or "/" + else: + # NT + path_ = path.rsplit("/", 1)[0] + if len(path_) <= 3: + if path_[1:2] == ":": + # nt root (something like c:/) + return path_[0] + ":/" + # More cases may be required here + return path_ + + @classmethod + def _strip_protocol(cls, path): + path = stringify_path(path) + if path.startswith("file://"): + path = path[7:] + elif path.startswith("file:"): + path = path[5:] + elif path.startswith("local://"): + path = path[8:] + elif path.startswith("local:"): + path = path[6:] + + path = make_path_posix(path) + if os.sep != "/": + # This code-path is a stripped down version of + # > drive, path = ntpath.splitdrive(path) + if path[1:2] == ":": + # Absolute drive-letter path, e.g. X:\Windows + # Relative path with drive, e.g. X:Windows + drive, path = path[:2], path[2:] + elif path[:2] == "//": + # UNC drives, e.g. \\server\share or \\?\UNC\server\share + # Device drives, e.g. \\.\device or \\?\device + if (index1 := path.find("/", 2)) == -1 or ( + index2 := path.find("/", index1 + 1) + ) == -1: + drive, path = path, "" + else: + drive, path = path[:index2], path[index2:] + else: + # Relative path, e.g. Windows + drive = "" + + path = path.rstrip("/") or cls.root_marker + return drive + path + + else: + return path.rstrip("/") or cls.root_marker + + def _isfilestore(self): + # Inheriting from DaskFileSystem makes this False (S3, etc. 
were) + # the original motivation. But we are a posix-like file system. + # See https://github.com/dask/dask/issues/5526 + return True + + def chmod(self, path, mode): + path = stringify_path(path) + return os.chmod(path, mode) + + +def make_path_posix(path): + """Make path generic and absolute for current OS""" + if not isinstance(path, str): + if isinstance(path, (list, set, tuple)): + return type(path)(make_path_posix(p) for p in path) + else: + path = stringify_path(path) + if not isinstance(path, str): + raise TypeError(f"could not convert {path!r} to string") + if os.sep == "/": + # Native posix + if path.startswith("/"): + # most common fast case for posix + return path + elif path.startswith("~"): + return osp.expanduser(path) + elif path.startswith("./"): + path = path[2:] + elif path == ".": + path = "" + return f"{os.getcwd()}/{path}" + else: + # NT handling + if path[0:1] == "/" and path[2:3] == ":": + # path is like "/c:/local/path" + path = path[1:] + if path[1:2] == ":": + # windows full path like "C:\\local\\path" + if len(path) <= 3: + # nt root (something like c:/) + return path[0] + ":/" + path = path.replace("\\", "/") + return path + elif path[0:1] == "~": + return make_path_posix(osp.expanduser(path)) + elif path.startswith(("\\\\", "//")): + # windows UNC/DFS-style paths + return "//" + path[2:].replace("\\", "/") + elif path.startswith(("\\", "/")): + # windows relative path with root + path = path.replace("\\", "/") + return f"{osp.splitdrive(os.getcwd())[0]}{path}" + else: + path = path.replace("\\", "/") + if path.startswith("./"): + path = path[2:] + elif path == ".": + path = "" + return f"{make_path_posix(os.getcwd())}/{path}" + + +def trailing_sep(path): + """Return True if the path ends with a path separator. + + A forward slash is always considered a path separator, even on Operating + Systems that normally use a backslash. + """ + # TODO: if all incoming paths were posix-compliant then separator would + # always be a forward slash, simplifying this function. + # See https://github.com/fsspec/filesystem_spec/pull/1250 + return path.endswith(os.sep) or (os.altsep is not None and path.endswith(os.altsep)) + + +@lru_cache(maxsize=1) +def get_umask(mask: int = 0o666) -> int: + """Get the current umask. + + Follows https://stackoverflow.com/a/44130549 to get the umask. + Temporarily sets the umask to the given value, and then resets it to the + original value. + """ + value = os.umask(mask) + os.umask(value) + return value + + +class LocalFileOpener(io.IOBase): + def __init__( + self, path, mode, autocommit=True, fs=None, compression=None, **kwargs + ): + logger.debug("open file: %s", path) + self.path = path + self.mode = mode + self.fs = fs + self.f = None + self.autocommit = autocommit + self.compression = get_compression(path, compression) + self.blocksize = io.DEFAULT_BUFFER_SIZE + self._open() + + def _open(self): + if self.f is None or self.f.closed: + if self.autocommit or "w" not in self.mode: + self.f = open(self.path, mode=self.mode) + if self.compression: + compress = compr[self.compression] + self.f = compress(self.f, mode=self.mode) + else: + # TODO: check if path is writable? 
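+            # Non-autocommit writes land in a named temp file; commit() later moves it over self.path and discard() deletes it, so the destination never holds partially written data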
+ i, name = tempfile.mkstemp() + os.close(i) # we want normal open and normal buffered file + self.temp = name + self.f = open(name, mode=self.mode) + if "w" not in self.mode: + self.size = self.f.seek(0, 2) + self.f.seek(0) + self.f.size = self.size + + def _fetch_range(self, start, end): + # probably only used by cached FS + if "r" not in self.mode: + raise ValueError + self._open() + self.f.seek(start) + return self.f.read(end - start) + + def __setstate__(self, state): + self.f = None + loc = state.pop("loc", None) + self.__dict__.update(state) + if "r" in state["mode"]: + self.f = None + self._open() + self.f.seek(loc) + + def __getstate__(self): + d = self.__dict__.copy() + d.pop("f") + if "r" in self.mode: + d["loc"] = self.f.tell() + else: + if not self.f.closed: + raise ValueError("Cannot serialise open write-mode local file") + return d + + def commit(self): + if self.autocommit: + raise RuntimeError("Can only commit if not already set to autocommit") + try: + shutil.move(self.temp, self.path) + except PermissionError as e: + # shutil.move raises PermissionError if os.rename + # and the default copy2 fallback with shutil.copystats fail. + # The file should be there nonetheless, but without copied permissions. + # If it doesn't exist, there was no permission to create the file. + if not os.path.exists(self.path): + raise e + else: + # If PermissionError is not raised, permissions can be set. + try: + mask = 0o666 + os.chmod(self.path, mask & ~get_umask(mask)) + except RuntimeError: + pass + + def discard(self): + if self.autocommit: + raise RuntimeError("Cannot discard if set to autocommit") + os.remove(self.temp) + + def readable(self) -> bool: + return True + + def writable(self) -> bool: + return "r" not in self.mode + + def read(self, *args, **kwargs): + return self.f.read(*args, **kwargs) + + def write(self, *args, **kwargs): + return self.f.write(*args, **kwargs) + + def tell(self, *args, **kwargs): + return self.f.tell(*args, **kwargs) + + def seek(self, *args, **kwargs): + return self.f.seek(*args, **kwargs) + + def seekable(self, *args, **kwargs): + return self.f.seekable(*args, **kwargs) + + def readline(self, *args, **kwargs): + return self.f.readline(*args, **kwargs) + + def readlines(self, *args, **kwargs): + return self.f.readlines(*args, **kwargs) + + def close(self): + return self.f.close() + + def truncate(self, size=None) -> int: + return self.f.truncate(size) + + @property + def closed(self): + return self.f.closed + + def fileno(self): + return self.raw.fileno() + + def flush(self) -> None: + self.f.flush() + + def __iter__(self): + return self.f.__iter__() + + def __getattr__(self, item): + return getattr(self.f, item) + + def __enter__(self): + self._incontext = True + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._incontext = False + self.f.__exit__(exc_type, exc_value, traceback) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/memory.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/memory.py new file mode 100644 index 0000000000000000000000000000000000000000..838876d1f87faa28ed05f02454e210fa534b736e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/memory.py @@ -0,0 +1,311 @@ +from __future__ import annotations + +import logging +from datetime import datetime, timezone +from errno import ENOTEMPTY +from io import BytesIO +from pathlib 
import PurePath, PureWindowsPath +from typing import Any, ClassVar + +from fsspec import AbstractFileSystem +from fsspec.implementations.local import LocalFileSystem +from fsspec.utils import stringify_path + +logger = logging.getLogger("fsspec.memoryfs") + + +class MemoryFileSystem(AbstractFileSystem): + """A filesystem based on a dict of BytesIO objects + + This is a global filesystem so instances of this class all point to the same + in memory filesystem. + """ + + store: ClassVar[dict[str, Any]] = {} # global, do not overwrite! + pseudo_dirs = [""] # global, do not overwrite! + protocol = "memory" + root_marker = "/" + + @classmethod + def _strip_protocol(cls, path): + if isinstance(path, PurePath): + if isinstance(path, PureWindowsPath): + return LocalFileSystem._strip_protocol(path) + else: + path = stringify_path(path) + + path = path.removeprefix("memory://") + if "::" in path or "://" in path: + return path.rstrip("/") + path = path.lstrip("/").rstrip("/") + return "/" + path if path else "" + + def ls(self, path, detail=True, **kwargs): + path = self._strip_protocol(path) + if path in self.store: + # there is a key with this exact name + if not detail: + return [path] + return [ + { + "name": path, + "size": self.store[path].size, + "type": "file", + "created": self.store[path].created.timestamp(), + } + ] + paths = set() + starter = path + "/" + out = [] + for p2 in tuple(self.store): + if p2.startswith(starter): + if "/" not in p2[len(starter) :]: + # exact child + out.append( + { + "name": p2, + "size": self.store[p2].size, + "type": "file", + "created": self.store[p2].created.timestamp(), + } + ) + elif len(p2) > len(starter): + # implied child directory + ppath = starter + p2[len(starter) :].split("/", 1)[0] + if ppath not in paths: + out = out or [] + out.append( + { + "name": ppath, + "size": 0, + "type": "directory", + } + ) + paths.add(ppath) + for p2 in self.pseudo_dirs: + if p2.startswith(starter): + if "/" not in p2[len(starter) :]: + # exact child pdir + if p2 not in paths: + out.append({"name": p2, "size": 0, "type": "directory"}) + paths.add(p2) + else: + # directory implied by deeper pdir + ppath = starter + p2[len(starter) :].split("/", 1)[0] + if ppath not in paths: + out.append({"name": ppath, "size": 0, "type": "directory"}) + paths.add(ppath) + if not out: + if path in self.pseudo_dirs: + # empty dir + return [] + raise FileNotFoundError(path) + if detail: + return out + return sorted([f["name"] for f in out]) + + def mkdir(self, path, create_parents=True, **kwargs): + path = self._strip_protocol(path) + if path in self.store or path in self.pseudo_dirs: + raise FileExistsError(path) + if self._parent(path).strip("/") and self.isfile(self._parent(path)): + raise NotADirectoryError(self._parent(path)) + if create_parents and self._parent(path).strip("/"): + try: + self.mkdir(self._parent(path), create_parents, **kwargs) + except FileExistsError: + pass + if path and path not in self.pseudo_dirs: + self.pseudo_dirs.append(path) + + def makedirs(self, path, exist_ok=False): + try: + self.mkdir(path, create_parents=True) + except FileExistsError: + if not exist_ok: + raise + + def pipe_file(self, path, value, mode="overwrite", **kwargs): + """Set the bytes of given file + + Avoids copies of the data if possible + """ + mode = "xb" if mode == "create" else "wb" + self.open(path, mode=mode, data=value) + + def rmdir(self, path): + path = self._strip_protocol(path) + if path == "": + # silently avoid deleting FS root + return + if path in self.pseudo_dirs: + if not 
self.ls(path): + self.pseudo_dirs.remove(path) + else: + raise OSError(ENOTEMPTY, "Directory not empty", path) + else: + raise FileNotFoundError(path) + + def info(self, path, **kwargs): + logger.debug("info: %s", path) + path = self._strip_protocol(path) + if path in self.pseudo_dirs or any( + p.startswith(path + "/") for p in list(self.store) + self.pseudo_dirs + ): + return { + "name": path, + "size": 0, + "type": "directory", + } + elif path in self.store: + filelike = self.store[path] + return { + "name": path, + "size": filelike.size, + "type": "file", + "created": getattr(filelike, "created", None), + } + else: + raise FileNotFoundError(path) + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + path = self._strip_protocol(path) + if "x" in mode and self.exists(path): + raise FileExistsError + if path in self.pseudo_dirs: + raise IsADirectoryError(path) + parent = path + while len(parent) > 1: + parent = self._parent(parent) + if self.isfile(parent): + raise FileExistsError(parent) + if mode in ["rb", "ab", "r+b"]: + if path in self.store: + f = self.store[path] + if mode == "ab": + # position at the end of file + f.seek(0, 2) + else: + # position at the beginning of file + f.seek(0) + return f + else: + raise FileNotFoundError(path) + elif mode in {"wb", "xb"}: + if mode == "xb" and self.exists(path): + raise FileExistsError + m = MemoryFile(self, path, kwargs.get("data")) + if not self._intrans: + m.commit() + return m + else: + name = self.__class__.__name__ + raise ValueError(f"unsupported file mode for {name}: {mode!r}") + + def cp_file(self, path1, path2, **kwargs): + path1 = self._strip_protocol(path1) + path2 = self._strip_protocol(path2) + if self.isfile(path1): + self.store[path2] = MemoryFile( + self, path2, self.store[path1].getvalue() + ) # implicit copy + elif self.isdir(path1): + if path2 not in self.pseudo_dirs: + self.pseudo_dirs.append(path2) + else: + raise FileNotFoundError(path1) + + def cat_file(self, path, start=None, end=None, **kwargs): + logger.debug("cat: %s", path) + path = self._strip_protocol(path) + try: + return bytes(self.store[path].getbuffer()[start:end]) + except KeyError as e: + raise FileNotFoundError(path) from e + + def _rm(self, path): + path = self._strip_protocol(path) + try: + del self.store[path] + except KeyError as e: + raise FileNotFoundError(path) from e + + def modified(self, path): + path = self._strip_protocol(path) + try: + return self.store[path].modified + except KeyError as e: + raise FileNotFoundError(path) from e + + def created(self, path): + path = self._strip_protocol(path) + try: + return self.store[path].created + except KeyError as e: + raise FileNotFoundError(path) from e + + def isfile(self, path): + path = self._strip_protocol(path) + return path in self.store + + def rm(self, path, recursive=False, maxdepth=None): + if isinstance(path, str): + path = self._strip_protocol(path) + else: + path = [self._strip_protocol(p) for p in path] + paths = self.expand_path(path, recursive=recursive, maxdepth=maxdepth) + for p in reversed(paths): + if self.isfile(p): + self.rm_file(p) + # If the expanded path doesn't exist, it is only because the expanded + # path was a directory that does not exist in self.pseudo_dirs. This + # is possible if you directly create files without making the + # directories first. 
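+            # e.g. after pipe_file("/a/b/c") with no mkdir, rm("/a", recursive=True) removes the file first (reversed order); "/a/b" and "/a" were only implied by that file and no longer exist, so they are skipped below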
+ elif not self.exists(p): + continue + else: + self.rmdir(p) + + +class MemoryFile(BytesIO): + """A BytesIO which can't close and works as a context manager + + Can initialise with data. Each path should only be active once at any moment. + + No need to provide fs, path if auto-committing (default) + """ + + def __init__(self, fs=None, path=None, data=None): + logger.debug("open file %s", path) + self.fs = fs + self.path = path + self.created = datetime.now(tz=timezone.utc) + self.modified = datetime.now(tz=timezone.utc) + if data: + super().__init__(data) + self.seek(0) + + @property + def size(self): + return self.getbuffer().nbytes + + def __enter__(self): + return self + + def close(self): + pass + + def discard(self): + pass + + def commit(self): + self.fs.store[self.path] = self + self.modified = datetime.now(tz=timezone.utc) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/reference.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/reference.py new file mode 100644 index 0000000000000000000000000000000000000000..66be6ec7ca1cbebc5d80a328f7ed78becaeb0a37 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/reference.py @@ -0,0 +1,1305 @@ +import base64 +import collections +import io +import itertools +import logging +import math +import os +from functools import lru_cache +from itertools import chain +from typing import TYPE_CHECKING, Literal + +import fsspec.core +from fsspec.spec import AbstractBufferedFile + +try: + import ujson as json +except ImportError: + if not TYPE_CHECKING: + import json + +from fsspec.asyn import AsyncFileSystem +from fsspec.callbacks import DEFAULT_CALLBACK +from fsspec.core import filesystem, open, split_protocol +from fsspec.implementations.asyn_wrapper import AsyncFileSystemWrapper +from fsspec.utils import isfilelike, merge_offset_ranges, other_paths + +logger = logging.getLogger("fsspec.reference") + + +class ReferenceNotReachable(RuntimeError): + def __init__(self, reference, target, *args): + super().__init__(*args) + self.reference = reference + self.target = target + + def __str__(self): + return f'Reference "{self.reference}" failed to fetch target {self.target}' + + +def _first(d): + return next(iter(d.values())) + + +def _prot_in_references(path, references): + ref = references.get(path) + if isinstance(ref, (list, tuple)) and isinstance(ref[0], str): + return split_protocol(ref[0])[0] if ref[0] else ref[0] + + +def _protocol_groups(paths, references): + if isinstance(paths, str): + return {_prot_in_references(paths, references): [paths]} + out = {} + for path in paths: + protocol = _prot_in_references(path, references) + out.setdefault(protocol, []).append(path) + return out + + +class RefsValuesView(collections.abc.ValuesView): + def __iter__(self): + for val in self._mapping.zmetadata.values(): + yield json.dumps(val).encode() + yield from self._mapping._items.values() + for field in self._mapping.listdir(): + chunk_sizes = self._mapping._get_chunk_sizes(field) + if len(chunk_sizes) == 0: + yield self._mapping[field + "/0"] + continue + yield from self._mapping._generate_all_records(field) + + +class RefsItemsView(collections.abc.ItemsView): + def __iter__(self): + return zip(self._mapping.keys(), self._mapping.values()) + + +def ravel_multi_index(idx, sizes): + val = 0 + mult = 1 + for i, s in zip(idx[::-1], sizes[::-1]): + val += i * mult + mult *= s + 
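+    # e.g. ravel_multi_index((1, 2), (3, 4)) -> 1*4 + 2 == 6 (row-major / C-order linearisation)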
return val + + +class LazyReferenceMapper(collections.abc.MutableMapping): + """This interface can be used to read/write references from Parquet stores. + It is not intended for other types of references. + It can be used with Kerchunk's MultiZarrToZarr method to combine + references into a parquet store. + Examples of this use-case can be found here: + https://fsspec.github.io/kerchunk/advanced.html?highlight=parquet#parquet-storage""" + + # import is class level to prevent numpy dep requirement for fsspec + @property + def np(self): + import numpy as np + + return np + + @property + def pd(self): + import pandas as pd + + return pd + + def __init__( + self, + root, + fs=None, + out_root=None, + cache_size=128, + categorical_threshold=10, + engine: Literal["fastparquet", "pyarrow"] = "fastparquet", + ): + """ + + This instance will be writable, storing changes in memory until full partitions + are accumulated or .flush() is called. + + To create an empty lazy store, use .create() + + Parameters + ---------- + root : str + Root of parquet store + fs : fsspec.AbstractFileSystem + fsspec filesystem object, default is local filesystem. + cache_size : int, default=128 + Maximum size of LRU cache, where cache_size*record_size denotes + the total number of references that can be loaded in memory at once. + categorical_threshold : int + Encode urls as pandas.Categorical to reduce memory footprint if the ratio + of the total number of refs to the number of unique urls for each variable + is greater than this number. (default 10) + engine: Literal["fastparquet","pyarrow"] + Engine choice for reading parquet files. (default is "fastparquet") + """ + + self.root = root + self.chunk_sizes = {} + self.cat_thresh = categorical_threshold + self.engine = engine + self.cache_size = cache_size + self.url = self.root + "/{field}/refs.{record}.parq" + # TODO: derive fs from `root` + self.fs = fsspec.filesystem("file") if fs is None else fs + self.out_root = self.fs.unstrip_protocol(out_root or self.root) + + from importlib.util import find_spec + + if self.engine == "pyarrow" and find_spec("pyarrow") is None: + raise ImportError("engine choice `pyarrow` is not installed.") + + def __getattr__(self, item): + if item in ("_items", "record_size", "zmetadata"): + self.setup() + # avoid possible recursion if setup fails somehow + return self.__dict__[item] + raise AttributeError(item) + + def setup(self): + self._items = {} + self._items[".zmetadata"] = self.fs.cat_file( + "/".join([self.root, ".zmetadata"]) + ) + met = json.loads(self._items[".zmetadata"]) + self.record_size = met["record_size"] + self.zmetadata = met["metadata"] + + # Define function to open and decompress refs + @lru_cache(maxsize=self.cache_size) + def open_refs(field, record): + """cached parquet file loader""" + path = self.url.format(field=field, record=record) + data = io.BytesIO(self.fs.cat_file(path)) + try: + df = self.pd.read_parquet(data, engine=self.engine) + refs = {c: df[c].to_numpy() for c in df.columns} + except OSError: + refs = None + return refs + + self.open_refs = open_refs + + @staticmethod + def create(root, storage_options=None, fs=None, record_size=10000, **kwargs): + """Make empty parquet reference set + + First deletes the contents of the given directory, if it exists.
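+        (point ``root`` at a fresh or dedicated directory: anything already present there is removed)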
+ + Parameters + ---------- + root: str + Directory to contain the output; will be created + storage_options: dict | None + For making the filesystem to use for writing, if fs is None + fs: FileSystem | None + Filesystem for writing + record_size: int + Number of references per parquet file + kwargs: passed to __init__ + + Returns + ------- + LazyReferenceMapper instance + """ + met = {"metadata": {}, "record_size": record_size} + if fs is None: + fs, root = fsspec.core.url_to_fs(root, **(storage_options or {})) + if fs.exists(root): + fs.rm(root, recursive=True) + fs.makedirs(root, exist_ok=True) + fs.pipe("/".join([root, ".zmetadata"]), json.dumps(met).encode()) + return LazyReferenceMapper(root, fs, **kwargs) + + @lru_cache() + def listdir(self): + """List top-level directories""" + dirs = (p.rsplit("/", 1)[0] for p in self.zmetadata if not p.startswith(".z")) + return set(dirs) + + def ls(self, path="", detail=True): + """Shortcut file listings""" + path = path.rstrip("/") + pathdash = path + "/" if path else "" + dirnames = self.listdir() + dirs = [ + d + for d in dirnames + if d.startswith(pathdash) and "/" not in d.lstrip(pathdash) + ] + if dirs: + others = { + f + for f in chain( + [".zmetadata"], + (name for name in self.zmetadata), + (name for name in self._items), + ) + if f.startswith(pathdash) and "/" not in f.lstrip(pathdash) + } + if detail is False: + others.update(dirs) + return sorted(others) + dirinfo = [{"name": name, "type": "directory", "size": 0} for name in dirs] + fileinfo = [ + { + "name": name, + "type": "file", + "size": len( + json.dumps(self.zmetadata[name]) + if name in self.zmetadata + else self._items[name] + ), + } + for name in others + ] + return sorted(dirinfo + fileinfo, key=lambda s: s["name"]) + field = path + others = set( + [name for name in self.zmetadata if name.startswith(f"{path}/")] + + [name for name in self._items if name.startswith(f"{path}/")] + ) + fileinfo = [ + { + "name": name, + "type": "file", + "size": len( + json.dumps(self.zmetadata[name]) + if name in self.zmetadata + else self._items[name] + ), + } + for name in others + ] + keys = self._keys_in_field(field) + + if detail is False: + return list(others) + list(keys) + recs = self._generate_all_records(field) + recinfo = [ + {"name": name, "type": "file", "size": rec[-1]} + for name, rec in zip(keys, recs) + if rec[0] # filters out path==None, deleted/missing + ] + return fileinfo + recinfo + + def _load_one_key(self, key): + """Get the reference for one key + + Returns bytes, one-element list or three-element list.
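+        bytes means inline data; a one-element list is [url] for a whole file; a three-element list is [url, offset, size] for a byte range within the target.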
+ """ + if key in self._items: + return self._items[key] + elif key in self.zmetadata: + return json.dumps(self.zmetadata[key]).encode() + elif "/" not in key or self._is_meta(key): + raise KeyError(key) + field, _ = key.rsplit("/", 1) + record, ri, chunk_size = self._key_to_record(key) + maybe = self._items.get((field, record), {}).get(ri, False) + if maybe is None: + # explicitly deleted + raise KeyError + elif maybe: + return maybe + elif chunk_size == 0: + return b"" + + # Chunk keys can be loaded from row group and cached in LRU cache + try: + refs = self.open_refs(field, record) + except (ValueError, TypeError, FileNotFoundError) as exc: + raise KeyError(key) from exc + columns = ["path", "offset", "size", "raw"] + selection = [refs[c][ri] if c in refs else None for c in columns] + raw = selection[-1] + if raw is not None: + return raw + if selection[0] is None: + raise KeyError("This reference does not exist or has been deleted") + if selection[1:3] == [0, 0]: + # URL only + return selection[:1] + # URL, offset, size + return selection[:3] + + @lru_cache(4096) + def _key_to_record(self, key): + """Details needed to construct a reference for one key""" + field, chunk = key.rsplit("/", 1) + chunk_sizes = self._get_chunk_sizes(field) + if len(chunk_sizes) == 0: + return 0, 0, 0 + chunk_idx = [int(c) for c in chunk.split(".")] + chunk_number = ravel_multi_index(chunk_idx, chunk_sizes) + record = chunk_number // self.record_size + ri = chunk_number % self.record_size + return record, ri, len(chunk_sizes) + + def _get_chunk_sizes(self, field): + """The number of chunks along each axis for a given field""" + if field not in self.chunk_sizes: + zarray = self.zmetadata[f"{field}/.zarray"] + size_ratio = [ + math.ceil(s / c) for s, c in zip(zarray["shape"], zarray["chunks"]) + ] + self.chunk_sizes[field] = size_ratio or [1] + return self.chunk_sizes[field] + + def _generate_record(self, field, record): + """The references for a given parquet file of a given field""" + refs = self.open_refs(field, record) + it = iter(zip(*refs.values())) + if len(refs) == 3: + # All urls + return (list(t) for t in it) + elif len(refs) == 1: + # All raws + return refs["raw"] + else: + # Mix of urls and raws + return (list(t[:3]) if not t[3] else t[3] for t in it) + + def _generate_all_records(self, field): + """Load all the references within a field by iterating over the parquet files""" + nrec = 1 + for ch in self._get_chunk_sizes(field): + nrec *= ch + nrec = math.ceil(nrec / self.record_size) + for record in range(nrec): + yield from self._generate_record(field, record) + + def values(self): + return RefsValuesView(self) + + def items(self): + return RefsItemsView(self) + + def __hash__(self): + return id(self) + + def __getitem__(self, key): + return self._load_one_key(key) + + def __setitem__(self, key, value): + if "/" in key and not self._is_meta(key): + field, chunk = key.rsplit("/", 1) + record, i, _ = self._key_to_record(key) + subdict = self._items.setdefault((field, record), {}) + subdict[i] = value + if len(subdict) == self.record_size: + self.write(field, record) + else: + # metadata or top-level + if hasattr(value, "to_bytes"): + val = value.to_bytes().decode() + elif isinstance(value, bytes): + val = value.decode() + else: + val = value + self._items[key] = val + new_value = json.loads(val) + self.zmetadata[key] = {**self.zmetadata.get(key, {}), **new_value} + + @staticmethod + def _is_meta(key): + return key.startswith(".z") or "/.z" in key + + def __delitem__(self, key): + if key in 
self._items: + del self._items[key] + elif key in self.zmetadata: + del self.zmetadata[key] + else: + if "/" in key and not self._is_meta(key): + field, _ = key.rsplit("/", 1) + record, i, _ = self._key_to_record(key) + subdict = self._items.setdefault((field, record), {}) + subdict[i] = None + if len(subdict) == self.record_size: + self.write(field, record) + else: + # metadata or top-level + self._items[key] = None + + def write(self, field, record, base_url=None, storage_options=None): + # extra requirements if writing + import kerchunk.df + import numpy as np + import pandas as pd + + partition = self._items[(field, record)] + original = False + if len(partition) < self.record_size: + try: + original = self.open_refs(field, record) + except OSError: + pass + + if original: + paths = original["path"] + offsets = original["offset"] + sizes = original["size"] + raws = original["raw"] + else: + paths = np.full(self.record_size, np.nan, dtype="O") + offsets = np.zeros(self.record_size, dtype="int64") + sizes = np.zeros(self.record_size, dtype="int64") + raws = np.full(self.record_size, np.nan, dtype="O") + for j, data in partition.items(): + if isinstance(data, list): + if ( + str(paths.dtype) == "category" + and data[0] not in paths.dtype.categories + ): + paths = paths.add_categories(data[0]) + paths[j] = data[0] + if len(data) > 1: + offsets[j] = data[1] + sizes[j] = data[2] + elif data is None: + # delete + paths[j] = None + offsets[j] = 0 + sizes[j] = 0 + raws[j] = None + else: + # this is the only call into kerchunk, could remove + raws[j] = kerchunk.df._proc_raw(data) + # TODO: only save needed columns + df = pd.DataFrame( + { + "path": paths, + "offset": offsets, + "size": sizes, + "raw": raws, + }, + copy=False, + ) + if df.path.count() / (df.path.nunique() or 1) > self.cat_thresh: + df["path"] = df["path"].astype("category") + object_encoding = {"raw": "bytes", "path": "utf8"} + has_nulls = ["path", "raw"] + + fn = f"{base_url or self.out_root}/{field}/refs.{record}.parq" + self.fs.mkdirs(f"{base_url or self.out_root}/{field}", exist_ok=True) + + if self.engine == "pyarrow": + df_backend_kwargs = {"write_statistics": False} + elif self.engine == "fastparquet": + df_backend_kwargs = { + "stats": False, + "object_encoding": object_encoding, + "has_nulls": has_nulls, + } + else: + raise NotImplementedError(f"{self.engine} not supported") + df.to_parquet( + fn, + engine=self.engine, + storage_options=storage_options + or getattr(self.fs, "storage_options", None), + compression="zstd", + index=False, + **df_backend_kwargs, + ) + + partition.clear() + self._items.pop((field, record)) + + def flush(self, base_url=None, storage_options=None): + """Output any modified or deleted keys + + Parameters + ---------- + base_url: str + Location of the output + """ + + # write what we have so far and clear sub chunks + for thing in list(self._items): + if isinstance(thing, tuple): + field, record = thing + self.write( + field, + record, + base_url=base_url, + storage_options=storage_options, + ) + + # gather .zmetadata from self._items and write that too + for k in list(self._items): + if k != ".zmetadata" and ".z" in k: + self.zmetadata[k] = json.loads(self._items.pop(k)) + met = {"metadata": self.zmetadata, "record_size": self.record_size} + self._items.clear() + self._items[".zmetadata"] = json.dumps(met).encode() + self.fs.pipe( + "/".join([base_url or self.out_root, ".zmetadata"]), + self._items[".zmetadata"], + ) + + # TODO: only clear those that we wrote to? 
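+        # conservative: drops every cached parquet record, not just the ones rewritten above, trading some re-reads for guaranteed freshness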
+ self.open_refs.cache_clear() + + def __len__(self): + # Caveat: This counts expected references, not actual - but is fast + count = 0 + for field in self.listdir(): + if field.startswith("."): + count += 1 + else: + count += math.prod(self._get_chunk_sizes(field)) + count += len(self.zmetadata) # all metadata keys + # any other files not in reference partitions + count += sum(1 for _ in self._items if not isinstance(_, tuple)) + return count + + def __iter__(self): + # Caveat: returns only existing keys, so the number of these does not + # match len(self) + metas = set(self.zmetadata) + metas.update(self._items) + for bit in metas: + if isinstance(bit, str): + yield bit + for field in self.listdir(): + for k in self._keys_in_field(field): + if k in self: + yield k + + def __contains__(self, item): + try: + self._load_one_key(item) + return True + except KeyError: + return False + + def _keys_in_field(self, field): + """List key names in given field + + Produces strings like "field/x.y" appropriate from the chunking of the array + """ + chunk_sizes = self._get_chunk_sizes(field) + if len(chunk_sizes) == 0: + yield field + "/0" + return + inds = itertools.product(*(range(i) for i in chunk_sizes)) + for ind in inds: + yield field + "/" + ".".join([str(c) for c in ind]) + + +class ReferenceFileSystem(AsyncFileSystem): + """View byte ranges of some other file as a file system + Initial version: single file system target, which must support + async, and must allow start and end args in _cat_file. Later versions + may allow multiple arbitrary URLs for the targets. + This FileSystem is read-only. It is designed to be used with async + targets (for now). We do not get original file details from the target FS. + Configuration is by passing a dict of references at init, or a URL to + a JSON file containing the same; this dict + can also contain concrete data for some set of paths. + Reference dict format: + {path0: bytes_data, path1: (target_url, offset, size)} + https://github.com/fsspec/kerchunk/blob/main/README.md + """ + + protocol = "reference" + cachable = False + + def __init__( + self, + fo, + target=None, + ref_storage_args=None, + target_protocol=None, + target_options=None, + remote_protocol=None, + remote_options=None, + fs=None, + template_overrides=None, + simple_templates=True, + max_gap=64_000, + max_block=256_000_000, + cache_size=128, + **kwargs, + ): + """ + Parameters + ---------- + fo : dict or str + The set of references to use for this instance, with a structure as above. + If str referencing a JSON file, will use fsspec.open, in conjunction + with target_options and target_protocol to open and parse JSON at this + location. If a directory, then assume references are a set of parquet + files to be loaded lazily. + target : str + For any references having target_url as None, this is the default file + target to use + ref_storage_args : dict + If references is a str, use these kwargs for loading the JSON file. + Deprecated: use target_options instead. + target_protocol : str + Used for loading the reference file, if it is a path. If None, protocol + will be derived from the given path + target_options : dict + Extra FS options for loading the reference file ``fo``, if given as a path + remote_protocol : str + The protocol of the filesystem on which the references will be evaluated + (unless fs is provided). If not given, will be derived from the first + URL that has a protocol in the templates or in the references, in that + order. 
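+        (for kerchunk-generated references this is typically the protocol of the original data files, e.g. ``s3`` or ``https``)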
remote_options : dict + kwargs to go with remote_protocol + fs : AbstractFileSystem | dict(str, (AbstractFileSystem | dict)) + Directly provide a file system(s): + - a single filesystem instance + - a dict of protocol:filesystem, where each value is either a filesystem + instance, or a dict of kwargs that can be used to create an + instance for the given protocol + + If this is given, remote_options and remote_protocol are ignored. + template_overrides : dict + Swap out any templates in the references file with these - useful for + testing. + simple_templates: bool + Whether templates can be processed with simple replace (True) or if + jinja is needed (False, much slower). All reference sets produced by + ``kerchunk`` are simple in this sense, but the spec allows for complex. + max_gap, max_block: int + For merging multiple concurrent requests to the same remote file. + Neighboring byte ranges will only be merged when their + inter-range gap is <= ``max_gap``. Default is 64KB. Set to 0 + to only merge when it requires no extra bytes. Pass a negative + number to disable merging, appropriate for local target files. + Neighboring byte ranges will only be merged when the size of + the aggregated range is <= ``max_block``. Default is 256MB. + cache_size : int + Maximum size of LRU cache, where cache_size*record_size denotes + the total number of references that can be loaded in memory at once. + Only used for lazily loaded references. + kwargs : passed to parent class + """ + super().__init__(**kwargs) + self.target = target + self.template_overrides = template_overrides + self.simple_templates = simple_templates + self.templates = {} + self.fss = {} + self._dircache = {} + self.max_gap = max_gap + self.max_block = max_block + if isinstance(fo, str): + dic = dict( + **(ref_storage_args or target_options or {}), protocol=target_protocol + ) + ref_fs, fo2 = fsspec.core.url_to_fs(fo, **dic) + if ref_fs.isfile(fo2): + # text JSON + with fsspec.open(fo, "rb", **dic) as f: + logger.info("Read reference from URL %s", fo) + text = json.load(f) + self._process_references(text, template_overrides) + else: + # Lazy parquet refs + logger.info("Open lazy reference dict from URL %s", fo) + self.references = LazyReferenceMapper( + fo2, + fs=ref_fs, + cache_size=cache_size, + ) + else: + # dictionaries + self._process_references(fo, template_overrides) + if isinstance(fs, dict): + self.fss = { + k: ( + fsspec.filesystem(k.split(":", 1)[0], **opts) + if isinstance(opts, dict) + else opts + ) + for k, opts in fs.items() + } + if None not in self.fss: + self.fss[None] = filesystem("file") + return + if fs is not None: + # single remote FS + remote_protocol = ( + fs.protocol[0] if isinstance(fs.protocol, tuple) else fs.protocol + ) + self.fss[remote_protocol] = fs + + if remote_protocol is None: + # get single protocol from any templates + for ref in self.templates.values(): + if callable(ref): + ref = ref() + protocol, _ = fsspec.core.split_protocol(ref) + if protocol and protocol not in self.fss: + fs = filesystem(protocol, **(remote_options or {})) + self.fss[protocol] = fs + if remote_protocol is None: + # get single protocol from references + # TODO: warning here, since this can be very expensive?
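+            # falling back to scanning reference values; for a LazyReferenceMapper backed by parquet this may load records from storage, hence the TODO above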
+ for ref in self.references.values(): + if callable(ref): + ref = ref() + if isinstance(ref, list) and ref[0]: + protocol, _ = fsspec.core.split_protocol(ref[0]) + if protocol not in self.fss: + fs = filesystem(protocol, **(remote_options or {})) + self.fss[protocol] = fs + # only use first remote URL + break + + if remote_protocol and remote_protocol not in self.fss: + fs = filesystem(remote_protocol, **(remote_options or {})) + self.fss[remote_protocol] = fs + + self.fss[None] = fs or filesystem("file") # default one + # Wrap any non-async filesystems to ensure async methods are available below + for k, f in self.fss.items(): + if not f.async_impl: + self.fss[k] = AsyncFileSystemWrapper(f, asynchronous=self.asynchronous) + elif self.asynchronous ^ f.asynchronous: + raise ValueError( + "Reference-FS's target filesystem must have same value " + "of asynchronous" + ) + + def _cat_common(self, path, start=None, end=None): + path = self._strip_protocol(path) + logger.debug(f"cat: {path}") + try: + part = self.references[path] + except KeyError as exc: + raise FileNotFoundError(path) from exc + if isinstance(part, str): + part = part.encode() + if hasattr(part, "to_bytes"): + part = part.to_bytes() + if isinstance(part, bytes): + logger.debug(f"Reference: {path}, type bytes") + if part.startswith(b"base64:"): + part = base64.b64decode(part[7:]) + return part, None, None + + if len(part) == 1: + logger.debug(f"Reference: {path}, whole file => {part}") + url = part[0] + start1, end1 = start, end + else: + url, start0, size = part + logger.debug(f"Reference: {path} => {url}, offset {start0}, size {size}") + end0 = start0 + size + + if start is not None: + if start >= 0: + start1 = start0 + start + else: + start1 = end0 + start + else: + start1 = start0 + if end is not None: + if end >= 0: + end1 = start0 + end + else: + end1 = end0 + end + else: + end1 = end0 + if url is None: + url = self.target + return url, start1, end1 + + async def _cat_file(self, path, start=None, end=None, **kwargs): + part_or_url, start0, end0 = self._cat_common(path, start=start, end=end) + if isinstance(part_or_url, bytes): + return part_or_url[start:end] + protocol, _ = split_protocol(part_or_url) + try: + return await self.fss[protocol]._cat_file( + part_or_url, start=start0, end=end0 + ) + except Exception as e: + raise ReferenceNotReachable(path, part_or_url) from e + + def cat_file(self, path, start=None, end=None, **kwargs): + part_or_url, start0, end0 = self._cat_common(path, start=start, end=end) + if isinstance(part_or_url, bytes): + return part_or_url[start:end] + protocol, _ = split_protocol(part_or_url) + try: + return self.fss[protocol].cat_file(part_or_url, start=start0, end=end0) + except Exception as e: + raise ReferenceNotReachable(path, part_or_url) from e + + def pipe_file(self, path, value, **_): + """Temporarily add binary data or reference as a file""" + self.references[path] = value + + async def _get_file(self, rpath, lpath, **kwargs): + if self.isdir(rpath): + return os.makedirs(lpath, exist_ok=True) + data = await self._cat_file(rpath) + with open(lpath, "wb") as f: + f.write(data) + + def get_file(self, rpath, lpath, callback=DEFAULT_CALLBACK, **kwargs): + if self.isdir(rpath): + return os.makedirs(lpath, exist_ok=True) + data = self.cat_file(rpath, **kwargs) + callback.set_size(len(data)) + if isfilelike(lpath): + lpath.write(data) + else: + with open(lpath, "wb") as f: + f.write(data) + callback.absolute_update(len(data)) + + def get(self, rpath, lpath, recursive=False, **kwargs): + if 
recursive: + # trigger directory build + self.ls("") + rpath = self.expand_path(rpath, recursive=recursive) + fs = fsspec.filesystem("file", auto_mkdir=True) + targets = other_paths(rpath, lpath) + if recursive: + data = self.cat([r for r in rpath if not self.isdir(r)]) + else: + data = self.cat(rpath) + for remote, local in zip(rpath, targets): + if remote in data: + fs.pipe_file(local, data[remote]) + + def cat(self, path, recursive=False, on_error="raise", **kwargs): + if isinstance(path, str) and recursive: + raise NotImplementedError + if isinstance(path, list) and (recursive or any("*" in p for p in path)): + raise NotImplementedError + # TODO: if references is lazy, pre-fetch all paths in batch before access + proto_dict = _protocol_groups(path, self.references) + out = {} + for proto, paths in proto_dict.items(): + fs = self.fss[proto] + urls, starts, ends, valid_paths = [], [], [], [] + for p in paths: + # find references or label not-found. Early exit if any not + # found and on_error is "raise" + try: + u, s, e = self._cat_common(p) + if not isinstance(u, (bytes, str)): + # nan/None from parquet + continue + except FileNotFoundError as err: + if on_error == "raise": + raise + if on_error != "omit": + out[p] = err + else: + urls.append(u) + starts.append(s) + ends.append(e) + valid_paths.append(p) + + # process references into form for merging + urls2 = [] + starts2 = [] + ends2 = [] + paths2 = [] + whole_files = set() + for u, s, e, p in zip(urls, starts, ends, valid_paths): + if isinstance(u, bytes): + # data + out[p] = u + elif s is None: + # whole file - limits are None, None, but no further + # entries take for this file + whole_files.add(u) + urls2.append(u) + starts2.append(s) + ends2.append(e) + paths2.append(p) + for u, s, e, p in zip(urls, starts, ends, valid_paths): + # second run to account for files that are to be loaded whole + if s is not None and u not in whole_files: + urls2.append(u) + starts2.append(s) + ends2.append(e) + paths2.append(p) + + # merge and fetch consolidated ranges + new_paths, new_starts, new_ends = merge_offset_ranges( + list(urls2), + list(starts2), + list(ends2), + sort=True, + max_gap=self.max_gap, + max_block=self.max_block, + ) + bytes_out = fs.cat_ranges(new_paths, new_starts, new_ends) + + # unbundle from merged bytes - simple approach + for u, s, e, p in zip(urls, starts, ends, valid_paths): + if p in out: + continue # was bytes, already handled + for np, ns, ne, b in zip(new_paths, new_starts, new_ends, bytes_out): + if np == u and (ns is None or ne is None): + if isinstance(b, Exception): + out[p] = b + else: + out[p] = b[s:e] + elif np == u and s >= ns and e <= ne: + if isinstance(b, Exception): + out[p] = b + else: + out[p] = b[s - ns : (e - ne) or None] + + for k, v in out.copy().items(): + # these were valid references, but fetch failed, so transform exc + if isinstance(v, Exception) and k in self.references: + ex = out[k] + new_ex = ReferenceNotReachable(k, self.references[k]) + new_ex.__cause__ = ex + if on_error == "raise": + raise new_ex + elif on_error != "omit": + out[k] = new_ex + + if len(out) == 1 and isinstance(path, str) and "*" not in path: + return _first(out) + return out + + def _process_references(self, references, template_overrides=None): + vers = references.get("version", None) + if vers is None: + self._process_references0(references) + elif vers == 1: + self._process_references1(references, template_overrides=template_overrides) + else: + raise ValueError(f"Unknown reference spec version: {vers}") + # TODO: we 
make dircache by iterating over all entries, but for Spec >= 1, + # can replace with programmatic. Is it even needed for mapper interface? + + def _process_references0(self, references): + """Make reference dict for Spec Version 0""" + if isinstance(references, dict): + # do not do this for lazy/parquet backend, which will not make dicts, + # but must remain writable in the original object + references = { + key: json.dumps(val) if isinstance(val, dict) else val + for key, val in references.items() + } + self.references = references + + def _process_references1(self, references, template_overrides=None): + if not self.simple_templates or self.templates: + import jinja2 + self.references = {} + self._process_templates(references.get("templates", {})) + + @lru_cache(1000) + def _render_jinja(u): + return jinja2.Template(u).render(**self.templates) + + for k, v in references.get("refs", {}).items(): + if isinstance(v, str): + if v.startswith("base64:"): + # keep the decoded bytes; plain strings fall through unchanged + self.references[k] = base64.b64decode(v[7:]) + else: + self.references[k] = v + elif isinstance(v, dict): + self.references[k] = json.dumps(v) + elif self.templates: + u = v[0] + if "{{" in u: + if self.simple_templates: + u = ( + u.replace("{{", "{") + .replace("}}", "}") + .format(**self.templates) + ) + else: + u = _render_jinja(u) + self.references[k] = [u] if len(v) == 1 else [u, v[1], v[2]] + else: + self.references[k] = v + self.references.update(self._process_gen(references.get("gen", []))) + + def _process_templates(self, tmp): + self.templates = {} + if self.template_overrides is not None: + tmp.update(self.template_overrides) + for k, v in tmp.items(): + if "{{" in v: + import jinja2 + + self.templates[k] = lambda temp=v, **kwargs: jinja2.Template( + temp + ).render(**kwargs) + else: + self.templates[k] = v + + def _process_gen(self, gens): + out = {} + for gen in gens: + dimension = { + k: ( + v + if isinstance(v, list) + else range(v.get("start", 0), v["stop"], v.get("step", 1)) + ) + for k, v in gen["dimensions"].items() + } + products = ( + dict(zip(dimension.keys(), values)) + for values in itertools.product(*dimension.values()) + ) + for pr in products: + import jinja2 + + key = jinja2.Template(gen["key"]).render(**pr, **self.templates) + url = jinja2.Template(gen["url"]).render(**pr, **self.templates) + if ("offset" in gen) and ("length" in gen): + offset = int( + jinja2.Template(gen["offset"]).render(**pr, **self.templates) + ) + length = int( + jinja2.Template(gen["length"]).render(**pr, **self.templates) + ) + out[key] = [url, offset, length] + elif ("offset" in gen) ^ ("length" in gen): + raise ValueError( + "Both 'offset' and 'length' are required for a " + "reference generator entry if either is provided."
+ ) + else: + out[key] = [url] + return out + + def _dircache_from_items(self): + self.dircache = {"": []} + it = self.references.items() + for path, part in it: + if isinstance(part, (bytes, str)) or hasattr(part, "to_bytes"): + size = len(part) + elif len(part) == 1: + size = None + else: + _, _, size = part + par = path.rsplit("/", 1)[0] if "/" in path else "" + par0 = par + subdirs = [par0] + while par0 and par0 not in self.dircache: + # collect parent directories + par0 = self._parent(par0) + subdirs.append(par0) + + subdirs.reverse() + for parent, child in zip(subdirs, subdirs[1:]): + # register newly discovered directories + assert child not in self.dircache + assert parent in self.dircache + self.dircache[parent].append( + {"name": child, "type": "directory", "size": 0} + ) + self.dircache[child] = [] + + self.dircache[par].append({"name": path, "type": "file", "size": size}) + + def _open(self, path, mode="rb", block_size=None, cache_options=None, **kwargs): + part_or_url, start0, end0 = self._cat_common(path) + # This logic is kept outside `ReferenceFile` to avoid unnecessary redirection. + # That does mean `_cat_common` gets called twice if it eventually reaches `ReferenceFile`. + if isinstance(part_or_url, bytes): + return io.BytesIO(part_or_url[start0:end0]) + + protocol, _ = split_protocol(part_or_url) + if start0 is None and end0 is None: + return self.fss[protocol]._open( + part_or_url, + mode, + block_size=block_size, + cache_options=cache_options, + **kwargs, + ) + + return ReferenceFile( + self, + path, + mode, + block_size=block_size, + cache_options=cache_options, + **kwargs, + ) + + def ls(self, path, detail=True, **kwargs): + logger.debug("list %s", path) + path = self._strip_protocol(path) + if isinstance(self.references, LazyReferenceMapper): + try: + return self.references.ls(path, detail) + except KeyError: + pass + raise FileNotFoundError(f"'{path}' is not a known key") + if not self.dircache: + self._dircache_from_items() + out = self._ls_from_cache(path) + if out is None: + raise FileNotFoundError(path) + if detail: + return out + return [o["name"] for o in out] + + def exists(self, path, **kwargs): # overwrite auto-sync version + return self.isdir(path) or self.isfile(path) + + def isdir(self, path): # overwrite auto-sync version + if self.dircache: + return path in self.dircache + elif isinstance(self.references, LazyReferenceMapper): + return path in self.references.listdir() + else: + # this may be faster than building dircache for single calls, but + # by looping will be slow for many calls; could cache it? 
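+            # e.g. with references {"a/b": ...}: isdir("a") is True, isdir("a/b") is False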
+ return any(_.startswith(f"{path}/") for _ in self.references) + + def isfile(self, path): # overwrite auto-sync version + return path in self.references + + async def _ls(self, path, detail=True, **kwargs): # calls fast sync code + return self.ls(path, detail, **kwargs) + + def find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs): + if withdirs: + return super().find( + path, maxdepth=maxdepth, withdirs=withdirs, detail=detail, **kwargs + ) + if path: + path = self._strip_protocol(path) + r = sorted(k for k in self.references if k.startswith(path)) + else: + r = sorted(self.references) + if detail: + if not self.dircache: + self._dircache_from_items() + return {k: self._ls_from_cache(k)[0] for k in r} + else: + return r + + def info(self, path, **kwargs): + out = self.references.get(path) + if out is not None: + if isinstance(out, (str, bytes)): + # decode base64 here + return {"name": path, "type": "file", "size": len(out)} + elif len(out) > 1: + return {"name": path, "type": "file", "size": out[2]} + else: + out0 = [{"name": path, "type": "file", "size": None}] + else: + out = self.ls(path, True) + out0 = [o for o in out if o["name"] == path] + if not out0: + return {"name": path, "type": "directory", "size": 0} + if out0[0]["size"] is None: + # if this is a whole remote file, update size using remote FS + prot, _ = split_protocol(self.references[path][0]) + out0[0]["size"] = self.fss[prot].size(self.references[path][0]) + return out0[0] + + async def _info(self, path, **kwargs): # calls fast sync code + return self.info(path) + + async def _rm_file(self, path, **kwargs): + self.references.pop( + path, None + ) # ignores FileNotFound, just as well for directories + self.dircache.clear() # this is a bit heavy handed + + async def _pipe_file(self, path, data, mode="overwrite", **kwargs): + if mode == "create" and self.exists(path): + raise FileExistsError + # can be str or bytes + self.references[path] = data + self.dircache.clear() # this is a bit heavy handed + + async def _put_file(self, lpath, rpath, mode="overwrite", **kwargs): + # puts binary + if mode == "create" and self.exists(rpath): + raise FileExistsError + with open(lpath, "rb") as f: + self.references[rpath] = f.read() + self.dircache.clear() # this is a bit heavy handed + + def save_json(self, url, **storage_options): + """Write modified references into new location""" + out = {} + for k, v in self.references.items(): + if isinstance(v, bytes): + try: + out[k] = v.decode("ascii") + except UnicodeDecodeError: + out[k] = (b"base64:" + base64.b64encode(v)).decode() + else: + out[k] = v + with fsspec.open(url, "wb", **storage_options) as f: + f.write(json.dumps({"version": 1, "refs": out}).encode()) + + +class ReferenceFile(AbstractBufferedFile): + def __init__( + self, + fs, + path, + mode="rb", + block_size="default", + autocommit=True, + cache_type="readahead", + cache_options=None, + size=None, + **kwargs, + ): + super().__init__( + fs, + path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + size=size, + cache_type=cache_type, + cache_options=cache_options, + **kwargs, + ) + part_or_url, self.start, self.end = self.fs._cat_common(self.path) + protocol, _ = split_protocol(part_or_url) + self.src_fs = self.fs.fss[protocol] + self.src_path = part_or_url + self._f = None + + @property + def f(self): + if self._f is None or self._f.closed: + self._f = self.src_fs._open( + self.src_path, + mode=self.mode, + block_size=self.blocksize, + autocommit=self.autocommit, + cache_type="none", + 
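+                # no caching on the inner file: this ReferenceFile already buffers reads itself (cache_type defaults to "readahead" above), so double-caching would hold the data twice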
**self.kwargs, + ) + return self._f + + def close(self): + if self._f is not None: + self._f.close() + return super().close() + + def _fetch_range(self, start, end): + start = start + self.start + end = min(end + self.start, self.end) + self.f.seek(start) + return self.f.read(end - start) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/sftp.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/sftp.py new file mode 100644 index 0000000000000000000000000000000000000000..77f7b370cd246f9a9bfd34141afc3edd728d13c3 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/sftp.py @@ -0,0 +1,180 @@ +import datetime +import logging +import os +import types +import uuid +from stat import S_ISDIR, S_ISLNK + +import paramiko + +from .. import AbstractFileSystem +from ..utils import infer_storage_options + +logger = logging.getLogger("fsspec.sftp") + + +class SFTPFileSystem(AbstractFileSystem): + """Files over SFTP/SSH + + Peer-to-peer filesystem over SSH using paramiko. + + Note: if using this with the ``open`` or ``open_files``, with full URLs, + there is no way to tell if a path is relative, so all paths are assumed + to be absolute. + """ + + protocol = "sftp", "ssh" + + def __init__(self, host, **ssh_kwargs): + """ + + Parameters + ---------- + host: str + Hostname or IP as a string + temppath: str + Location on the server to put files, when within a transaction + ssh_kwargs: dict + Parameters passed on to connection. See details in + https://docs.paramiko.org/en/3.3/api/client.html#paramiko.client.SSHClient.connect + May include port, username, password... + """ + if self._cached: + return + super().__init__(**ssh_kwargs) + self.temppath = ssh_kwargs.pop("temppath", "/tmp") # remote temp directory + self.host = host + self.ssh_kwargs = ssh_kwargs + self._connect() + + def _connect(self): + logger.debug("Connecting to SFTP server %s", self.host) + self.client = paramiko.SSHClient() + self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + self.client.connect(self.host, **self.ssh_kwargs) + self.ftp = self.client.open_sftp() + + @classmethod + def _strip_protocol(cls, path): + return infer_storage_options(path)["path"] + + @staticmethod + def _get_kwargs_from_urls(urlpath): + out = infer_storage_options(urlpath) + out.pop("path", None) + out.pop("protocol", None) + return out + + def mkdir(self, path, create_parents=True, mode=511): + logger.debug("Creating folder %s", path) + if self.exists(path): + raise FileExistsError(f"File exists: {path}") + + if create_parents: + self.makedirs(path) + else: + self.ftp.mkdir(path, mode) + + def makedirs(self, path, exist_ok=False, mode=511): + if self.exists(path) and not exist_ok: + raise FileExistsError(f"File exists: {path}") + + parts = path.split("/") + new_path = "/" if path[:1] == "/" else "" + + for part in parts: + if part: + new_path = f"{new_path}/{part}" if new_path else part + if not self.exists(new_path): + self.ftp.mkdir(new_path, mode) + + def rmdir(self, path): + logger.debug("Removing folder %s", path) + self.ftp.rmdir(path) + + def info(self, path): + stat = self._decode_stat(self.ftp.stat(path)) + stat["name"] = path + return stat + + @staticmethod + def _decode_stat(stat, parent_path=None): + if S_ISDIR(stat.st_mode): + t = "directory" + elif S_ISLNK(stat.st_mode): + t = "link" + else: + t = "file" + out = { + "name": "", + "size": stat.st_size, + 
"type": t, + "uid": stat.st_uid, + "gid": stat.st_gid, + "time": datetime.datetime.fromtimestamp( + stat.st_atime, tz=datetime.timezone.utc + ), + "mtime": datetime.datetime.fromtimestamp( + stat.st_mtime, tz=datetime.timezone.utc + ), + } + if parent_path: + out["name"] = "/".join([parent_path.rstrip("/"), stat.filename]) + return out + + def ls(self, path, detail=False): + logger.debug("Listing folder %s", path) + stats = [self._decode_stat(stat, path) for stat in self.ftp.listdir_iter(path)] + if detail: + return stats + else: + paths = [stat["name"] for stat in stats] + return sorted(paths) + + def put(self, lpath, rpath, callback=None, **kwargs): + logger.debug("Put file %s into %s", lpath, rpath) + self.ftp.put(lpath, rpath) + + def get_file(self, rpath, lpath, **kwargs): + if self.isdir(rpath): + os.makedirs(lpath, exist_ok=True) + else: + self.ftp.get(self._strip_protocol(rpath), lpath) + + def _open(self, path, mode="rb", block_size=None, **kwargs): + """ + block_size: int or None + If 0, no buffering, if 1, line buffering, if >1, buffer that many + bytes, if None use default from paramiko. + """ + logger.debug("Opening file %s", path) + if kwargs.get("autocommit", True) is False: + # writes to temporary file, move on commit + path2 = "/".join([self.temppath, str(uuid.uuid4())]) + f = self.ftp.open(path2, mode, bufsize=block_size if block_size else -1) + f.temppath = path2 + f.targetpath = path + f.fs = self + f.commit = types.MethodType(commit_a_file, f) + f.discard = types.MethodType(discard_a_file, f) + else: + f = self.ftp.open(path, mode, bufsize=block_size if block_size else -1) + return f + + def _rm(self, path): + if self.isdir(path): + self.ftp.rmdir(path) + else: + self.ftp.remove(path) + + def mv(self, old, new): + logger.debug("Renaming %s into %s", old, new) + self.ftp.posix_rename(old, new) + + +def commit_a_file(self): + self.fs.mv(self.temppath, self.targetpath) + + +def discard_a_file(self): + self.fs._rm(self.temppath) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/smb.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/smb.py new file mode 100644 index 0000000000000000000000000000000000000000..db6b3f5c3702de90cf121ccca49f3ca2b580df9f --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/smb.py @@ -0,0 +1,416 @@ +""" +This module contains SMBFileSystem class responsible for handling access to +Windows Samba network shares by using package smbprotocol +""" + +import datetime +import re +import uuid +from stat import S_ISDIR, S_ISLNK + +import smbclient +import smbprotocol.exceptions + +from .. import AbstractFileSystem +from ..utils import infer_storage_options + +# ! pylint: disable=bad-continuation + + +class SMBFileSystem(AbstractFileSystem): + """Allow reading and writing to Windows and Samba network shares. + + When using `fsspec.open()` for getting a file-like object the URI + should be specified as this format: + ``smb://workgroup;user:password@server:port/share/folder/file.csv``. + + Example:: + + >>> import fsspec + >>> with fsspec.open( + ... 'smb://myuser:mypassword@myserver.com/' 'share/folder/file.csv' + ... ) as smbfile: + ... df = pd.read_csv(smbfile, sep='|', header=None) + + Note that you need to pass in a valid hostname or IP address for the host + component of the URL. Do not use the Windows/NetBIOS machine name for the + host component. 
+ + The first component of the path in the URL points to the name of the shared + folder. Subsequent path components will point to the directory/folder/file. + + The URL components ``workgroup``, ``user``, ``password`` and ``port`` may be + optional. + + .. note:: + + For this to work, the `smbprotocol`_ package must be installed, e.g.:: + + $ pip install smbprotocol + # or + # pip install smbprotocol[kerberos] + + .. _smbprotocol: https://github.com/jborean93/smbprotocol#requirements + + Note: if using this with ``open`` or ``open_files``, with full URLs, + there is no way to tell if a path is relative, so all paths are assumed + to be absolute. + """ + + protocol = "smb" + + # pylint: disable=too-many-arguments + def __init__( + self, + host, + port=None, + username=None, + password=None, + timeout=60, + encrypt=None, + share_access=None, + register_session_retries=4, + register_session_retry_wait=1, + register_session_retry_factor=10, + auto_mkdir=False, + **kwargs, + ): + """ + You can use _get_kwargs_from_urls to get some kwargs from + a reasonable SMB url. + + Authentication will be anonymous or integrated if username/password are not + given. + + Parameters + ---------- + host: str + The remote server name/ip to connect to + port: int or None + Port to connect with. Usually 445, sometimes 139. + username: str or None + Username to connect with. Required if Kerberos auth is not being used. + password: str or None + User's password on the server, if using username + timeout: int + Connection timeout in seconds + encrypt: bool + Whether to force encryption or not; once this has been set to True, + the session cannot be changed back to False. + share_access: str or None + Specifies the default access applied to file open operations + performed with this file system object. + This affects whether other processes can concurrently open a handle + to the same file. + + - None (the default): exclusively locks the file until closed. + - 'r': Allow other handles to be opened with read access. + - 'w': Allow other handles to be opened with write access. + - 'd': Allow other handles to be opened with delete access. + register_session_retries: int + Number of retries to register a session with the server. Retries are not performed + for authentication errors, as they are considered invalid credentials rather than network + issues. If set to a negative value, no register attempts will be performed. + register_session_retry_wait: int + Time in seconds to wait between each retry. Number must be non-negative. + register_session_retry_factor: int + Base factor for the wait time between each retry. The wait time + is calculated using an exponential function. For factor=1 all wait times + will be equal to `register_session_retry_wait`. For any number of retries, + the last wait time will be equal to `register_session_retry_wait` and for retries>1 + the first wait time will be equal to `register_session_retry_wait / factor`. + Number must be equal to or greater than 1. Optimal factor is 10. + auto_mkdir: bool + Whether, when opening a file, the directory containing it should + be created (if it doesn't already exist). This is assumed by pyarrow + and zarr-python code.
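A brief sketch of how these constructor parameters combine in practice; the server name and credentials below are hypothetical placeholders:

    import fsspec

    fs = fsspec.filesystem(
        "smb",
        host="fileserver.example.com",
        username="user",
        password="secret",
        share_access="r",            # allow other handles to read concurrently
        register_session_retries=4,  # retry transient connection failures
        auto_mkdir=True,             # create parent directories on write
    )
    print(fs.ls("/share/folder"))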
+ """ + super().__init__(**kwargs) + self.host = host + self.port = port + self.username = username + self.password = password + self.timeout = timeout + self.encrypt = encrypt + self.temppath = kwargs.pop("temppath", "") + self.share_access = share_access + self.register_session_retries = register_session_retries + if register_session_retry_wait < 0: + raise ValueError( + "register_session_retry_wait must be a non-negative integer" + ) + self.register_session_retry_wait = register_session_retry_wait + if register_session_retry_factor < 1: + raise ValueError( + "register_session_retry_factor must be a positive " + "integer equal to or greater than 1" + ) + self.register_session_retry_factor = register_session_retry_factor + self.auto_mkdir = auto_mkdir + self._connect() + + @property + def _port(self): + return 445 if self.port is None else self.port + + def _connect(self): + import time + + if self.register_session_retries <= -1: + return + + retried_errors = [] + + wait_time = self.register_session_retry_wait + n_waits = ( + self.register_session_retries - 1 + ) # -1 = No wait time after the last retry + factor = self.register_session_retry_factor + + # Generate wait times for each retry attempt. + # Wait times are calculated using exponential function. For factor=1 all wait times + # will be equal to `wait`. For any number of retries the last wait time will be + # equal to `wait` and for retries>2 the first wait time will be equal to `wait / factor`. + wait_times = iter( + factor ** (n / n_waits - 1) * wait_time for n in range(0, n_waits + 1) + ) + + for attempt in range(self.register_session_retries + 1): + try: + smbclient.register_session( + self.host, + username=self.username, + password=self.password, + port=self._port, + encrypt=self.encrypt, + connection_timeout=self.timeout, + ) + return + except ( + smbprotocol.exceptions.SMBAuthenticationError, + smbprotocol.exceptions.LogonFailure, + ): + # These exceptions should not be repeated, as they clearly indicate + # that the credentials are invalid and not a network issue. + raise + except ValueError as exc: + if re.findall(r"\[Errno -\d+]", str(exc)): + # This exception is raised by the smbprotocol.transport:Tcp.connect + # and originates from socket.gaierror (OSError). These exceptions might + # be raised due to network instability. We will retry to connect. + retried_errors.append(exc) + else: + # All another ValueError exceptions should be raised, as they are not + # related to network issues. + raise + except Exception as exc: + # Save the exception and retry to connect. This except might be dropped + # in the future, once all exceptions suited for retry are identified. + retried_errors.append(exc) + + if attempt < self.register_session_retries: + time.sleep(next(wait_times)) + + # Raise last exception to inform user about the connection issues. + # Note: Should we use ExceptionGroup to raise all exceptions? 
+ raise retried_errors[-1] + + @classmethod + def _strip_protocol(cls, path): + return infer_storage_options(path)["path"] + + @staticmethod + def _get_kwargs_from_urls(path): + # smb://workgroup;user:password@host:port/share/folder/file.csv + out = infer_storage_options(path) + out.pop("path", None) + out.pop("protocol", None) + return out + + def mkdir(self, path, create_parents=True, **kwargs): + wpath = _as_unc_path(self.host, path) + if create_parents: + smbclient.makedirs(wpath, exist_ok=False, port=self._port, **kwargs) + else: + smbclient.mkdir(wpath, port=self._port, **kwargs) + + def makedirs(self, path, exist_ok=False): + if _share_has_path(path): + wpath = _as_unc_path(self.host, path) + smbclient.makedirs(wpath, exist_ok=exist_ok, port=self._port) + + def rmdir(self, path): + if _share_has_path(path): + wpath = _as_unc_path(self.host, path) + smbclient.rmdir(wpath, port=self._port) + + def info(self, path, **kwargs): + wpath = _as_unc_path(self.host, path) + stats = smbclient.stat(wpath, port=self._port, **kwargs) + if S_ISDIR(stats.st_mode): + stype = "directory" + elif S_ISLNK(stats.st_mode): + stype = "link" + else: + stype = "file" + res = { + "name": path + "/" if stype == "directory" else path, + "size": stats.st_size, + "type": stype, + "uid": stats.st_uid, + "gid": stats.st_gid, + "time": stats.st_atime, + "mtime": stats.st_mtime, + } + return res + + def created(self, path): + """Return the created timestamp of a file as a datetime.datetime""" + wpath = _as_unc_path(self.host, path) + stats = smbclient.stat(wpath, port=self._port) + return datetime.datetime.fromtimestamp(stats.st_ctime, tz=datetime.timezone.utc) + + def modified(self, path): + """Return the modified timestamp of a file as a datetime.datetime""" + wpath = _as_unc_path(self.host, path) + stats = smbclient.stat(wpath, port=self._port) + return datetime.datetime.fromtimestamp(stats.st_mtime, tz=datetime.timezone.utc) + + def ls(self, path, detail=True, **kwargs): + unc = _as_unc_path(self.host, path) + listed = smbclient.listdir(unc, port=self._port, **kwargs) + dirs = ["/".join([path.rstrip("/"), p]) for p in listed] + if detail: + dirs = [self.info(d) for d in dirs] + return dirs + + # pylint: disable=too-many-arguments + def _open( + self, + path, + mode="rb", + block_size=-1, + autocommit=True, + cache_options=None, + **kwargs, + ): + """ + block_size: int or None + If 0, no buffering, 1, line buffering, >1, buffer that many bytes + + Notes + ----- + By specifying 'share_access' in 'kwargs' it is possible to override the + default shared access setting applied in the constructor of this object. 
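A one-line sketch of the per-call override described in the notes above, assuming ``fs`` is an SMBFileSystem instance constructed as in the earlier sketch (the path is hypothetical):

    # Open read-shared even if the filesystem default is exclusive locking.
    with fs.open("/share/folder/file.csv", "rb", share_access="r") as f:
        header = f.read(1024)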
+ """ + if self.auto_mkdir and "w" in mode: + self.makedirs(self._parent(path), exist_ok=True) + bls = block_size if block_size is not None and block_size >= 0 else -1 + wpath = _as_unc_path(self.host, path) + share_access = kwargs.pop("share_access", self.share_access) + if "w" in mode and autocommit is False: + temp = _as_temp_path(self.host, path, self.temppath) + return SMBFileOpener( + wpath, temp, mode, port=self._port, block_size=bls, **kwargs + ) + return smbclient.open_file( + wpath, + mode, + buffering=bls, + share_access=share_access, + port=self._port, + **kwargs, + ) + + def copy(self, path1, path2, **kwargs): + """Copy within two locations in the same filesystem""" + wpath1 = _as_unc_path(self.host, path1) + wpath2 = _as_unc_path(self.host, path2) + if self.auto_mkdir: + self.makedirs(self._parent(path2), exist_ok=True) + smbclient.copyfile(wpath1, wpath2, port=self._port, **kwargs) + + def _rm(self, path): + if _share_has_path(path): + wpath = _as_unc_path(self.host, path) + stats = smbclient.stat(wpath, port=self._port) + if S_ISDIR(stats.st_mode): + smbclient.rmdir(wpath, port=self._port) + else: + smbclient.remove(wpath, port=self._port) + + def mv(self, path1, path2, recursive=None, maxdepth=None, **kwargs): + wpath1 = _as_unc_path(self.host, path1) + wpath2 = _as_unc_path(self.host, path2) + smbclient.rename(wpath1, wpath2, port=self._port, **kwargs) + + +def _as_unc_path(host, path): + rpath = path.replace("/", "\\") + unc = f"\\\\{host}{rpath}" + return unc + + +def _as_temp_path(host, path, temppath): + share = path.split("/")[1] + temp_file = f"/{share}{temppath}/{uuid.uuid4()}" + unc = _as_unc_path(host, temp_file) + return unc + + +def _share_has_path(path): + parts = path.count("/") + if path.endswith("/"): + return parts > 2 + return parts > 1 + + +class SMBFileOpener: + """writes to remote temporary file, move on commit""" + + def __init__(self, path, temp, mode, port=445, block_size=-1, **kwargs): + self.path = path + self.temp = temp + self.mode = mode + self.block_size = block_size + self.kwargs = kwargs + self.smbfile = None + self._incontext = False + self.port = port + self._open() + + def _open(self): + if self.smbfile is None or self.smbfile.closed: + self.smbfile = smbclient.open_file( + self.temp, + self.mode, + port=self.port, + buffering=self.block_size, + **self.kwargs, + ) + + def commit(self): + """Move temp file to definitive on success.""" + # TODO: use transaction support in SMB protocol + smbclient.replace(self.temp, self.path, port=self.port) + + def discard(self): + """Remove the temp file on failure.""" + smbclient.remove(self.temp, port=self.port) + + def __fspath__(self): + return self.path + + def __iter__(self): + return self.smbfile.__iter__() + + def __getattr__(self, item): + return getattr(self.smbfile, item) + + def __enter__(self): + self._incontext = True + return self.smbfile.__enter__() + + def __exit__(self, exc_type, exc_value, traceback): + self._incontext = False + self.smbfile.__exit__(exc_type, exc_value, traceback) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/tar.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/tar.py new file mode 100644 index 0000000000000000000000000000000000000000..412e5ba4d2cdea7db090dc96412e697909a38d78 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/tar.py @@ -0,0 +1,124 @@ +import logging +import 
tarfile + +import fsspec +from fsspec.archive import AbstractArchiveFileSystem +from fsspec.compression import compr +from fsspec.utils import infer_compression + +typemap = {b"0": "file", b"5": "directory"} + +logger = logging.getLogger("tar") + + +class TarFileSystem(AbstractArchiveFileSystem): + """Compressed Tar archives as a file-system (read-only) + + Supports the following formats: + tar.gz, tar.bz2, tar.xz + """ + + root_marker = "" + protocol = "tar" + cachable = False + + def __init__( + self, + fo="", + index_store=None, + target_options=None, + target_protocol=None, + compression=None, + **kwargs, + ): + super().__init__(**kwargs) + target_options = target_options or {} + + if isinstance(fo, str): + self.of = fsspec.open(fo, protocol=target_protocol, **target_options) + fo = self.of.open() # keep the reference + + # Try to infer compression. + if compression is None: + name = None + + # Try different ways to get hold of the filename. `fo` might either + # be a `fsspec.LocalFileOpener`, an `io.BufferedReader` or an + # `fsspec.AbstractFileSystem` instance. + try: + # Amended io.BufferedReader or similar. + # This uses a "protocol extension" where original filenames are + # propagated to archive-like filesystems in order to let them + # infer the right compression appropriately. + if hasattr(fo, "original"): + name = fo.original + + # fsspec.LocalFileOpener + elif hasattr(fo, "path"): + name = fo.path + + # io.BufferedReader + elif hasattr(fo, "name"): + name = fo.name + + # fsspec.AbstractFileSystem + elif hasattr(fo, "info"): + name = fo.info()["name"] + + except Exception as ex: + logger.warning( + f"Unable to determine file name, not inferring compression: {ex}" + ) + + if name is not None: + compression = infer_compression(name) + logger.info(f"Inferred compression {compression} from file name {name}") + + if compression is not None: + # TODO: tarfile already implements compression with modes like "r:gz", + # but would seeking to a data offset in the file still work then?
+ fo = compr[compression](fo) + + self._fo_ref = fo + self.fo = fo # the whole instance is a context + self.tar = tarfile.TarFile(fileobj=self.fo) + self.dir_cache = None + + self.index_store = index_store + self.index = None + self._index() + + def _index(self): + # TODO: load and set saved index, if exists + out = {} + for ti in self.tar: + info = ti.get_info() + info["type"] = typemap.get(info["type"], "file") + name = ti.get_info()["name"].rstrip("/") + out[name] = (info, ti.offset_data) + + self.index = out + # TODO: save index to self.index_store here, if set + + def _get_dirs(self): + if self.dir_cache is not None: + return + + # This enables ls to get directories as children as well as files + self.dir_cache = { + dirname: {"name": dirname, "size": 0, "type": "directory"} + for dirname in self._all_dirnames(self.tar.getnames()) + } + for member in self.tar.getmembers(): + info = member.get_info() + info["name"] = info["name"].rstrip("/") + info["type"] = typemap.get(info["type"], "file") + self.dir_cache[info["name"]] = info + + def _open(self, path, mode="rb", **kwargs): + if mode != "rb": + raise ValueError("Read-only filesystem implementation") + details, offset = self.index[path] + if details["type"] != "file": + raise ValueError("Can only handle regular files") + return self.tar.extractfile(path) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/webhdfs.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/webhdfs.py new file mode 100644 index 0000000000000000000000000000000000000000..c6e0d8446f875ec9578cccf055aeb47cd1c2e996 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/webhdfs.py @@ -0,0 +1,485 @@ +# https://hadoop.apache.org/docs/r1.0.4/webhdfs.html + +import logging +import os +import secrets +import shutil +import tempfile +import uuid +from contextlib import suppress +from urllib.parse import quote + +import requests + +from ..spec import AbstractBufferedFile, AbstractFileSystem +from ..utils import infer_storage_options, tokenize + +logger = logging.getLogger("webhdfs") + + +class WebHDFS(AbstractFileSystem): + """ + Interface to HDFS over HTTP using the WebHDFS API. HttpFS gateways are also supported. + + Four auth mechanisms are supported: + + insecure: no auth is done, and the user is assumed to be whoever they + say they are (parameter ``user``), or a predefined value such as + "dr.who" if not given + spnego: when kerberos authentication is enabled, auth is negotiated by + requests_kerberos https://github.com/requests/requests-kerberos . + This establishes a session based on existing kinit login and/or + specified principal/password; parameters are passed with ``kerb_kwargs`` + token: uses an existing Hadoop delegation token from another secured + service. This client can also generate such tokens when not running + in insecure mode. Note that tokens expire, but can be renewed (by a + previously specified user) and may allow for proxying. + basic-auth: used when both parameter ``user`` and parameter ``password`` + are provided.
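To make these auth mechanisms concrete, a hedged sketch; host, port and credentials are placeholders (the constructor below defaults to port 50070, while recent Hadoop name-nodes often listen on 9870):

    import fsspec

    # insecure: the user name is merely asserted
    fs = fsspec.filesystem("webhdfs", host="namenode.example.com",
                           port=9870, user="hadoop")

    # basic-auth: both user and password supplied
    fs = fsspec.filesystem("webhdfs", host="namenode.example.com",
                           port=9870, user="hadoop", password="secret")

    # token: reuse an existing delegation token (value elided)
    fs = fsspec.filesystem("webhdfs", host="namenode.example.com",
                           port=9870, token="<delegation-token>")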
+ + """ + + tempdir = str(tempfile.gettempdir()) + protocol = "webhdfs", "webHDFS" + + def __init__( + self, + host, + port=50070, + kerberos=False, + token=None, + user=None, + password=None, + proxy_to=None, + kerb_kwargs=None, + data_proxy=None, + use_https=False, + session_cert=None, + session_verify=True, + **kwargs, + ): + """ + Parameters + ---------- + host: str + Name-node address + port: int + Port for webHDFS + kerberos: bool + Whether to authenticate with kerberos for this connection + token: str or None + If given, use this token on every call to authenticate. A user + and user-proxy may be encoded in the token and should not be also + given + user: str or None + If given, assert the user name to connect with + password: str or None + If given, assert the password to use for basic auth. If password + is provided, user must be provided also + proxy_to: str or None + If given, the user has the authority to proxy, and this value is + the user in who's name actions are taken + kerb_kwargs: dict + Any extra arguments for HTTPKerberosAuth, see + ``_ + data_proxy: dict, callable or None + If given, map data-node addresses. This can be necessary if the + HDFS cluster is behind a proxy, running on Docker or otherwise has + a mismatch between the host-names given by the name-node and the + address by which to refer to them from the client. If a dict, + maps host names ``host->data_proxy[host]``; if a callable, full + URLs are passed, and function must conform to + ``url->data_proxy(url)``. + use_https: bool + Whether to connect to the Name-node using HTTPS instead of HTTP + session_cert: str or Tuple[str, str] or None + Path to a certificate file, or tuple of (cert, key) files to use + for the requests.Session + session_verify: str, bool or None + Path to a certificate file to use for verifying the requests.Session. + kwargs + """ + if self._cached: + return + super().__init__(**kwargs) + self.url = f"{'https' if use_https else 'http'}://{host}:{port}/webhdfs/v1" + self.kerb = kerberos + self.kerb_kwargs = kerb_kwargs or {} + self.pars = {} + self.proxy = data_proxy or {} + if token is not None: + if user is not None or proxy_to is not None: + raise ValueError( + "If passing a delegation token, must not set " + "user or proxy_to, as these are encoded in the" + " token" + ) + self.pars["delegation"] = token + self.user = user + self.password = password + + if password is not None: + if user is None: + raise ValueError( + "If passing a password, the user must also be" + "set in order to set up the basic-auth" + ) + else: + if user is not None: + self.pars["user.name"] = user + + if proxy_to is not None: + self.pars["doas"] = proxy_to + if kerberos and user is not None: + raise ValueError( + "If using Kerberos auth, do not specify the " + "user, this is handled by kinit." 
+ ) + + self.session_cert = session_cert + self.session_verify = session_verify + + self._connect() + + self._fsid = f"webhdfs_{tokenize(host, port)}" + + @property + def fsid(self): + return self._fsid + + def _connect(self): + self.session = requests.Session() + + if self.session_cert: + self.session.cert = self.session_cert + + self.session.verify = self.session_verify + + if self.kerb: + from requests_kerberos import HTTPKerberosAuth + + self.session.auth = HTTPKerberosAuth(**self.kerb_kwargs) + + if self.user is not None and self.password is not None: + from requests.auth import HTTPBasicAuth + + self.session.auth = HTTPBasicAuth(self.user, self.password) + + def _call(self, op, method="get", path=None, data=None, redirect=True, **kwargs): + path = self._strip_protocol(path) if path is not None else "" + url = self._apply_proxy(self.url + quote(path, safe="/=")) + args = kwargs.copy() + args.update(self.pars) + args["op"] = op.upper() + logger.debug("sending %s with %s", url, method) + out = self.session.request( + method=method.upper(), + url=url, + params=args, + data=data, + allow_redirects=redirect, + ) + if out.status_code in [400, 401, 403, 404, 500]: + try: + err = out.json() + msg = err["RemoteException"]["message"] + exp = err["RemoteException"]["exception"] + except (ValueError, KeyError): + pass + else: + if exp in ["IllegalArgumentException", "UnsupportedOperationException"]: + raise ValueError(msg) + elif exp in ["SecurityException", "AccessControlException"]: + raise PermissionError(msg) + elif exp in ["FileNotFoundException"]: + raise FileNotFoundError(msg) + else: + raise RuntimeError(msg) + out.raise_for_status() + return out + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + replication=None, + permissions=None, + **kwargs, + ): + """ + + Parameters + ---------- + path: str + File location + mode: str + 'rb', 'wb', etc. 
+ block_size: int + Client buffer size for read-ahead or write buffer + autocommit: bool + If False, writes to temporary file that only gets put in final + location upon commit + replication: int + Number of copies of file on the cluster, write mode only + permissions: str or int + posix permissions, write mode only + kwargs + + Returns + ------- + WebHDFile instance + """ + block_size = block_size or self.blocksize + return WebHDFile( + self, + path, + mode=mode, + block_size=block_size, + tempdir=self.tempdir, + autocommit=autocommit, + replication=replication, + permissions=permissions, + ) + + @staticmethod + def _process_info(info): + info["type"] = info["type"].lower() + info["size"] = info["length"] + return info + + @classmethod + def _strip_protocol(cls, path): + return infer_storage_options(path)["path"] + + @staticmethod + def _get_kwargs_from_urls(urlpath): + out = infer_storage_options(urlpath) + out.pop("path", None) + out.pop("protocol", None) + if "username" in out: + out["user"] = out.pop("username") + return out + + def info(self, path): + out = self._call("GETFILESTATUS", path=path) + info = out.json()["FileStatus"] + info["name"] = path + return self._process_info(info) + + def ls(self, path, detail=False): + out = self._call("LISTSTATUS", path=path) + infos = out.json()["FileStatuses"]["FileStatus"] + for info in infos: + self._process_info(info) + info["name"] = path.rstrip("/") + "/" + info["pathSuffix"] + if detail: + return sorted(infos, key=lambda i: i["name"]) + else: + return sorted(info["name"] for info in infos) + + def content_summary(self, path): + """Total numbers of files, directories and bytes under path""" + out = self._call("GETCONTENTSUMMARY", path=path) + return out.json()["ContentSummary"] + + def ukey(self, path): + """Checksum info of file, giving method and result""" + out = self._call("GETFILECHECKSUM", path=path, redirect=False) + if "Location" in out.headers: + location = self._apply_proxy(out.headers["Location"]) + out2 = self.session.get(location) + out2.raise_for_status() + return out2.json()["FileChecksum"] + else: + out.raise_for_status() + return out.json()["FileChecksum"] + + def home_directory(self): + """Get user's home directory""" + out = self._call("GETHOMEDIRECTORY") + return out.json()["Path"] + + def get_delegation_token(self, renewer=None): + """Retrieve token which can give the same authority to other users + + Parameters + ---------- + renewer: str or None + User who may use this token; if None, will be current user + """ + if renewer: + out = self._call("GETDELEGATIONTOKEN", renewer=renewer) + else: + out = self._call("GETDELEGATIONTOKEN") + t = out.json()["Token"] + if t is None: + raise ValueError("No token available for this user/security context") + return t["urlString"] + + def renew_delegation_token(self, token): + """Make token live longer. Returns new expiry time""" + out = self._call("RENEWDELEGATIONTOKEN", method="put", token=token) + return out.json()["long"] + + def cancel_delegation_token(self, token): + """Stop the token from being useful""" + self._call("CANCELDELEGATIONTOKEN", method="put", token=token) + + def chmod(self, path, mod): + """Set the permission at path + + Parameters + ---------- + path: str + location to set (file or directory) + mod: str or int + posix representation of permission; give as oct string, e.g., '777' + or 0o777 + """ + self._call("SETPERMISSION", method="put", path=path, permission=mod) + + def chown(self, path, owner=None, group=None): + """Change owning user and/or group""" + kwargs = {} + if owner is not None: + kwargs["owner"] = owner + if group is not None: + kwargs["group"] = group + self._call("SETOWNER", method="put", path=path, **kwargs) + + def set_replication(self, path, replication): + """ + Set file replication factor + + Parameters + ---------- + path: str + File location (not for directories) + replication: int + Number of copies of file on the cluster. Should be smaller than + number of data nodes; normally 3 on most systems. + """ + self._call("SETREPLICATION", path=path, method="put", replication=replication) + + def mkdir(self, path, **kwargs): + self._call("MKDIRS", method="put", path=path) + + def makedirs(self, path, exist_ok=False): + if exist_ok is False and self.exists(path): + raise FileExistsError(path) + self.mkdir(path) + + def mv(self, path1, path2, **kwargs): + self._call("RENAME", method="put", path=path1, destination=path2) + + def rm(self, path, recursive=False, **kwargs): + self._call( + "DELETE", + method="delete", + path=path, + recursive="true" if recursive else "false", + ) + + def rm_file(self, path, **kwargs): + self.rm(path) + + def cp_file(self, lpath, rpath, **kwargs): + with self.open(lpath) as lstream: + tmp_fname = "/".join([self._parent(rpath), f".tmp.{secrets.token_hex(16)}"]) + # Perform an atomic copy (stream to a temporary file and + # move it to the actual destination). + try: + with self.open(tmp_fname, "wb") as rstream: + shutil.copyfileobj(lstream, rstream) + self.mv(tmp_fname, rpath) + except BaseException: + with suppress(FileNotFoundError): + self.rm(tmp_fname) + raise + + def _apply_proxy(self, location): + if self.proxy and callable(self.proxy): + location = self.proxy(location) + elif self.proxy: + # as a dict + for k, v in self.proxy.items(): + location = location.replace(k, v, 1) + return location + + +class WebHDFile(AbstractBufferedFile): + """A file living in HDFS over webHDFS""" + + def __init__(self, fs, path, **kwargs): + super().__init__(fs, path, **kwargs) + kwargs = kwargs.copy() + if kwargs.get("permissions", None) is None: + kwargs.pop("permissions", None) + if kwargs.get("replication", None) is None: + kwargs.pop("replication", None) + self.permissions = kwargs.pop("permissions", 511) + tempdir = kwargs.pop("tempdir") + if kwargs.pop("autocommit", False) is False: + self.target = self.path + self.path = os.path.join(tempdir, str(uuid.uuid4())) + + def _upload_chunk(self, final=False): + """Write one part of a multi-block file upload + + Parameters + ========== + final: bool + This is the last block, so should complete the file, if + self.autocommit is True.
+ """ + out = self.fs.session.post( + self.location, + data=self.buffer.getvalue(), + headers={"content-type": "application/octet-stream"}, + ) + out.raise_for_status() + return True + + def _initiate_upload(self): + """Create remote file/upload""" + kwargs = self.kwargs.copy() + if "a" in self.mode: + op, method = "APPEND", "POST" + else: + op, method = "CREATE", "PUT" + kwargs["overwrite"] = "true" + out = self.fs._call(op, method, self.path, redirect=False, **kwargs) + location = self.fs._apply_proxy(out.headers["Location"]) + if "w" in self.mode: + # create empty file to append to + out2 = self.fs.session.put( + location, headers={"content-type": "application/octet-stream"} + ) + out2.raise_for_status() + # after creating empty file, change location to append to + out2 = self.fs._call("APPEND", "POST", self.path, redirect=False, **kwargs) + self.location = self.fs._apply_proxy(out2.headers["Location"]) + + def _fetch_range(self, start, end): + start = max(start, 0) + end = min(self.size, end) + if start >= end or start >= self.size: + return b"" + out = self.fs._call( + "OPEN", path=self.path, offset=start, length=end - start, redirect=False + ) + out.raise_for_status() + if "Location" in out.headers: + location = out.headers["Location"] + out2 = self.fs.session.get(self.fs._apply_proxy(location)) + return out2.content + else: + return out.content + + def commit(self): + self.fs.mv(self.path, self.target) + + def discard(self): + self.fs.rm(self.path) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/zip.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/zip.py new file mode 100644 index 0000000000000000000000000000000000000000..6db3ae27806106a19a366886ab4b183f85c1cb1a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/implementations/zip.py @@ -0,0 +1,177 @@ +import os +import zipfile + +import fsspec +from fsspec.archive import AbstractArchiveFileSystem + + +class ZipFileSystem(AbstractArchiveFileSystem): + """Read/Write contents of ZIP archive as a file-system + + Keeps file object open while instance lives. + + This class is pickleable, but not necessarily thread-safe + """ + + root_marker = "" + protocol = "zip" + cachable = False + + def __init__( + self, + fo="", + mode="r", + target_protocol=None, + target_options=None, + compression=zipfile.ZIP_STORED, + allowZip64=True, + compresslevel=None, + **kwargs, + ): + """ + Parameters + ---------- + fo: str or file-like + Contains ZIP, and must exist. If a str, will fetch file using + :meth:`~fsspec.open_files`, which must return one file exactly. + mode: str + Accept: "r", "w", "a" + target_protocol: str (optional) + If ``fo`` is a string, this value can be used to override the + FS protocol inferred from a URL + target_options: dict (optional) + Kwargs passed when instantiating the target FS, if ``fo`` is + a string. 
+ compression, allowZip64, compresslevel: passed to ZipFile + Only relevant when creating a ZIP + """ + super().__init__(self, **kwargs) + if mode not in set("rwa"): + raise ValueError(f"mode '{mode}' not understood") + self.mode = mode + if isinstance(fo, (str, os.PathLike)): + if mode == "a": + m = "r+b" + else: + m = mode + "b" + fo = fsspec.open( + fo, mode=m, protocol=target_protocol, **(target_options or {}) + ) + self.force_zip_64 = allowZip64 + self.of = fo + self.fo = fo.__enter__() # the whole instance is a context + self.zip = zipfile.ZipFile( + self.fo, + mode=mode, + compression=compression, + allowZip64=allowZip64, + compresslevel=compresslevel, + ) + self.dir_cache = None + + @classmethod + def _strip_protocol(cls, path): + # zip file paths are always relative to the archive root + return super()._strip_protocol(path).lstrip("/") + + def __del__(self): + if hasattr(self, "zip"): + self.close() + del self.zip + + def close(self): + """Commits any write changes to the file. Done on ``del`` too.""" + self.zip.close() + + def _get_dirs(self): + if self.dir_cache is None or self.mode in set("wa"): + # when writing, dir_cache is always in the ZipFile's attributes, + # not read from the file. + files = self.zip.infolist() + self.dir_cache = { + dirname.rstrip("/"): { + "name": dirname.rstrip("/"), + "size": 0, + "type": "directory", + } + for dirname in self._all_dirnames(self.zip.namelist()) + } + for z in files: + f = {s: getattr(z, s, None) for s in zipfile.ZipInfo.__slots__} + f.update( + { + "name": z.filename.rstrip("/"), + "size": z.file_size, + "type": ("directory" if z.is_dir() else "file"), + } + ) + self.dir_cache[f["name"]] = f + + def pipe_file(self, path, value, **kwargs): + # override upstream, because we know the exact file size in this case + self.zip.writestr(path, value, **kwargs) + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + path = self._strip_protocol(path) + if "r" in mode and self.mode in set("wa"): + if self.exists(path): + raise OSError("ZipFS can only be open for reading or writing, not both") + raise FileNotFoundError(path) + if "r" in self.mode and "w" in mode: + raise OSError("ZipFS can only be open for reading or writing, not both") + out = self.zip.open(path, mode.strip("b"), force_zip64=self.force_zip_64) + if "r" in mode: + info = self.info(path) + out.size = info["size"] + out.name = info["name"] + return out + + def find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs): + if maxdepth is not None and maxdepth < 1: + raise ValueError("maxdepth must be at least 1") + + # Remove the leading slash, as the zip file paths are always + # given without a leading slash + path = path.lstrip("/") + path_parts = list(filter(lambda s: bool(s), path.split("/"))) + + def _matching_starts(file_path): + file_parts = filter(lambda s: bool(s), file_path.split("/")) + return all(a == b for a, b in zip(path_parts, file_parts)) + + self._get_dirs() + + result = {} + # To match posix find, if an exact file name is given, we should + # return only that file + if path in self.dir_cache and self.dir_cache[path]["type"] == "file": + result[path] = self.dir_cache[path] + return result if detail else [path] + + for file_path, file_info in self.dir_cache.items(): + if not (path == "" or _matching_starts(file_path)): + continue + + if file_info["type"] == "directory": + if withdirs: + if file_path not in result: + result[file_path.strip("/")] = file_info + continue + + if file_path not
in result: + result[file_path] = file_info if detail else None + + if maxdepth: + path_depth = path.count("/") + result = { + k: v for k, v in result.items() if k.count("/") - path_depth < maxdepth + } + return result if detail else sorted(result) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/json.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/json.py new file mode 100644 index 0000000000000000000000000000000000000000..3bd2485ef1cc581d608f8627cb4133c198e35293 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/json.py @@ -0,0 +1,117 @@ +import json +from collections.abc import Mapping, Sequence +from contextlib import suppress +from pathlib import PurePath +from typing import ( + Any, + Callable, + ClassVar, + Optional, +) + +from .registry import _import_class, get_filesystem_class +from .spec import AbstractFileSystem + + +class FilesystemJSONEncoder(json.JSONEncoder): + include_password: ClassVar[bool] = True + + def default(self, o: Any) -> Any: + if isinstance(o, AbstractFileSystem): + return o.to_dict(include_password=self.include_password) + if isinstance(o, PurePath): + cls = type(o) + return {"cls": f"{cls.__module__}.{cls.__name__}", "str": str(o)} + + return super().default(o) + + def make_serializable(self, obj: Any) -> Any: + """ + Recursively converts an object so that it can be JSON serialized via + :func:`json.dumps` and :func:`json.dump`, without actually calling + said functions. + """ + if isinstance(obj, (str, int, float, bool)): + return obj + if isinstance(obj, Mapping): + return {k: self.make_serializable(v) for k, v in obj.items()} + if isinstance(obj, Sequence): + return [self.make_serializable(v) for v in obj] + + return self.default(obj) + + +class FilesystemJSONDecoder(json.JSONDecoder): + def __init__( + self, + *, + object_hook: Optional[Callable[[dict[str, Any]], Any]] = None, + parse_float: Optional[Callable[[str], Any]] = None, + parse_int: Optional[Callable[[str], Any]] = None, + parse_constant: Optional[Callable[[str], Any]] = None, + strict: bool = True, + object_pairs_hook: Optional[Callable[[list[tuple[str, Any]]], Any]] = None, + ) -> None: + self.original_object_hook = object_hook + + super().__init__( + object_hook=self.custom_object_hook, + parse_float=parse_float, + parse_int=parse_int, + parse_constant=parse_constant, + strict=strict, + object_pairs_hook=object_pairs_hook, + ) + + @classmethod + def try_resolve_path_cls(cls, dct: dict[str, Any]): + with suppress(Exception): + fqp = dct["cls"] + + path_cls = _import_class(fqp) + + if issubclass(path_cls, PurePath): + return path_cls + + return None + + @classmethod + def try_resolve_fs_cls(cls, dct: dict[str, Any]): + with suppress(Exception): + if "cls" in dct: + try: + fs_cls = _import_class(dct["cls"]) + if issubclass(fs_cls, AbstractFileSystem): + return fs_cls + except Exception: + if "protocol" in dct: # Fallback if cls cannot be imported + return get_filesystem_class(dct["protocol"]) + + raise + + return None + + def custom_object_hook(self, dct: dict[str, Any]): + if "cls" in dct: + if (obj_cls := self.try_resolve_fs_cls(dct)) is not None: + return AbstractFileSystem.from_dict(dct) + if (obj_cls := self.try_resolve_path_cls(dct)) is not None: + return obj_cls(dct["str"]) + + if self.original_object_hook is not None: + return self.original_object_hook(dct) + + return dct + + def unmake_serializable(self, obj: Any) -> Any: + """ + Inverse 
function of :meth:`FilesystemJSONEncoder.make_serializable`. + """ + if isinstance(obj, dict): + obj = self.custom_object_hook(obj) + if isinstance(obj, dict): + return {k: self.unmake_serializable(v) for k, v in obj.items()} + if isinstance(obj, (list, tuple)): + return [self.unmake_serializable(v) for v in obj] + + return obj diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/mapping.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/mapping.py new file mode 100644 index 0000000000000000000000000000000000000000..752eef35273b13eded7297e2e801b58e436a25b1 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/mapping.py @@ -0,0 +1,251 @@ +import array +import logging +import posixpath +import warnings +from collections.abc import MutableMapping +from functools import cached_property + +from fsspec.core import url_to_fs + +logger = logging.getLogger("fsspec.mapping") + + +class FSMap(MutableMapping): + """Wrap a FileSystem instance as a mutable mapping. + + The keys of the mapping become files under the given root, and the + values (which must be bytes) the contents of those files. + + Parameters + ---------- + root: string + prefix for all the files + fs: FileSystem instance + check: bool (=False) + performs a touch at the location, to check for write access. + + Examples + -------- + >>> fs = FileSystem(**parameters) # doctest: +SKIP + >>> d = FSMap('my-data/path/', fs) # doctest: +SKIP + or, more likely + >>> d = fs.get_mapper('my-data/path/') + + >>> d['loc1'] = b'Hello World' # doctest: +SKIP + >>> list(d.keys()) # doctest: +SKIP + ['loc1'] + >>> d['loc1'] # doctest: +SKIP + b'Hello World' + """ + + def __init__(self, root, fs, check=False, create=False, missing_exceptions=None): + self.fs = fs + self.root = fs._strip_protocol(root) + self._root_key_to_str = fs._strip_protocol(posixpath.join(root, "x"))[:-1] + if missing_exceptions is None: + missing_exceptions = ( + FileNotFoundError, + IsADirectoryError, + NotADirectoryError, + ) + self.missing_exceptions = missing_exceptions + self.check = check + self.create = create + if create: + if not self.fs.exists(root): + self.fs.mkdir(root) + if check: + if not self.fs.exists(root): + raise ValueError( + f"Path {root} does not exist. Create " + "with the ``create=True`` keyword" + ) + self.fs.touch(root + "/a") + self.fs.rm(root + "/a") + + @cached_property + def dirfs(self): + """dirfs instance that can be used with the same keys as the mapper""" + from .implementations.dirfs import DirFileSystem + + return DirFileSystem(path=self._root_key_to_str, fs=self.fs) + + def clear(self): + """Remove all keys below root - empties out mapping""" + logger.info("Clear mapping at %s", self.root) + try: + self.fs.rm(self.root, True) + self.fs.mkdir(self.root) + except: # noqa: E722 + pass + + def getitems(self, keys, on_error="raise"): + """Fetch multiple items from the store + + If the backend is async-able, this might proceed concurrently + + Parameters + ---------- + keys: list(str) + The keys to be fetched + on_error : "raise", "omit", "return" + If raise, an underlying exception will be raised (converted to KeyError + if the type is in self.missing_exceptions); if omit, keys with exception + will simply not be included in the output; if "return", all keys are + included in the output, but the value will be bytes or an exception + instance.
+ + Returns + ------- + dict(key, bytes|exception) + """ + keys2 = [self._key_to_str(k) for k in keys] + oe = on_error if on_error == "raise" else "return" + try: + out = self.fs.cat(keys2, on_error=oe) + if isinstance(out, bytes): + out = {keys2[0]: out} + except self.missing_exceptions as e: + raise KeyError from e + out = { + k: (KeyError() if isinstance(v, self.missing_exceptions) else v) + for k, v in out.items() + } + return { + key: out[k2] if on_error == "raise" else out.get(k2, KeyError(k2)) + for key, k2 in zip(keys, keys2) + if on_error == "return" or not isinstance(out[k2], BaseException) + } + + def setitems(self, values_dict): + """Set the values of multiple items in the store + + Parameters + ---------- + values_dict: dict(str, bytes) + """ + values = {self._key_to_str(k): maybe_convert(v) for k, v in values_dict.items()} + self.fs.pipe(values) + + def delitems(self, keys): + """Remove multiple keys from the store""" + self.fs.rm([self._key_to_str(k) for k in keys]) + + def _key_to_str(self, key): + """Generate full path for the key""" + if not isinstance(key, str): + # raise TypeError("key must be of type `str`, got `{type(key).__name__}`" + warnings.warn( + "from fsspec 2023.5 onward FSMap non-str keys will raise TypeError", + DeprecationWarning, + ) + if isinstance(key, list): + key = tuple(key) + key = str(key) + return f"{self._root_key_to_str}{key}".rstrip("/") + + def _str_to_key(self, s): + """Strip the root path off to leave the key name""" + return s[len(self.root) :].lstrip("/") + + def __getitem__(self, key, default=None): + """Retrieve data""" + k = self._key_to_str(key) + try: + result = self.fs.cat(k) + except self.missing_exceptions as exc: + if default is not None: + return default + raise KeyError(key) from exc + return result + + def pop(self, key, default=None): + """Pop data""" + result = self.__getitem__(key, default) + try: + del self[key] + except KeyError: + pass + return result + + def __setitem__(self, key, value): + """Store value in key""" + key = self._key_to_str(key) + self.fs.mkdirs(self.fs._parent(key), exist_ok=True) + self.fs.pipe_file(key, maybe_convert(value)) + + def __iter__(self): + return (self._str_to_key(x) for x in self.fs.find(self.root)) + + def __len__(self): + return len(self.fs.find(self.root)) + + def __delitem__(self, key): + """Remove key""" + try: + self.fs.rm(self._key_to_str(key)) + except Exception as exc: + raise KeyError from exc + + def __contains__(self, key): + """Does key exist in mapping?""" + path = self._key_to_str(key) + return self.fs.isfile(path) + + def __reduce__(self): + return FSMap, (self.root, self.fs, False, False, self.missing_exceptions) + + +def maybe_convert(value): + if isinstance(value, array.array) or hasattr(value, "__array__"): + # bytes-like things + if hasattr(value, "dtype") and value.dtype.kind in "Mm": + # The buffer interface doesn't support datetime64/timedelta64 numpy + # arrays + value = value.view("int64") + value = bytes(memoryview(value)) + return value + + +def get_mapper( + url="", + check=False, + create=False, + missing_exceptions=None, + alternate_root=None, + **kwargs, +): + """Create key-value interface for given URL and options + + The URL will be of the form "protocol://location" and point to the root + of the mapper required. All keys will be file-names below this location, + and their values the contents of each key. + + Also accepts compound URLs like zip::s3://bucket/file.zip, see ``fsspec.open``.
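A small sketch of the mapper interface in action over the local filesystem (the directory path is hypothetical):

    import fsspec

    m = fsspec.get_mapper("file:///tmp/mapper-demo", create=True)
    m["a/b"] = b"payload"       # writes the file /tmp/mapper-demo/a/b
    print(list(m))              # ['a/b']
    print(m["a/b"])             # b'payload'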
+ + Parameters + ---------- + url: str + Root URL of mapping + check: bool + Whether to attempt to read from the location before instantiation, to + check that the mapping does exist + create: bool + Whether to make the directory corresponding to the root before + instantiating + missing_exceptions: None or tuple + If given, these exception types will be regarded as missing keys and + return KeyError when trying to read data. By default, you get + (FileNotFoundError, IsADirectoryError, NotADirectoryError) + alternate_root: None or str + In cases of complex URLs, the parser may fail to pick the correct part + for the mapper root, so this arg can override + + Returns + ------- + ``FSMap`` instance, the dict-like key-value store. + """ + # Removing protocol here - could defer to each open() on the backend + fs, urlpath = url_to_fs(url, **kwargs) + root = alternate_root if alternate_root is not None else urlpath + return FSMap(root, fs, check, create, missing_exceptions=missing_exceptions) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/parquet.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/parquet.py new file mode 100644 index 0000000000000000000000000000000000000000..faedb7b9e0aa90b6fb7cba33e794c4b4fb35eb77 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/parquet.py @@ -0,0 +1,541 @@ +import io +import json +import warnings + +from .core import url_to_fs +from .utils import merge_offset_ranges + +# Parquet-Specific Utilities for fsspec +# +# Most of the functions defined in this module are NOT +# intended for public consumption. The only exception +# to this is `open_parquet_file`, which should be used in +# place of `fs.open()` to open parquet-formatted files +# on remote file systems. + + +def open_parquet_file( + path, + mode="rb", + fs=None, + metadata=None, + columns=None, + row_groups=None, + storage_options=None, + strict=False, + engine="auto", + max_gap=64_000, + max_block=256_000_000, + footer_sample_size=1_000_000, + **kwargs, +): + """ + Return a file-like object for a single Parquet file. + + The specified parquet `engine` will be used to parse the + footer metadata, and determine the required byte ranges + from the file. The target path will then be opened with + the "parts" (`KnownPartsOfAFile`) caching strategy. + + Note that this method is intended for usage with remote + file systems, and is unlikely to improve parquet-read + performance on local file systems. + + Parameters + ---------- + path: str + Target file path. + mode: str, optional + Mode option to be passed through to `fs.open`. Default is "rb". + metadata: Any, optional + Parquet metadata object. Object type must be supported + by the backend parquet engine. For now, only the "fastparquet" + engine supports an explicit `ParquetFile` metadata object. + If a metadata object is supplied, the remote footer metadata + will not need to be transferred into local memory. + fs: AbstractFileSystem, optional + Filesystem object to use for opening the file. If nothing is + specified, an `AbstractFileSystem` object will be inferred. + engine : str, default "auto" + Parquet engine to use for metadata parsing. Allowed options + include "fastparquet", "pyarrow", and "auto". The specified + engine must be installed in the current environment. If + "auto" is specified, and both engines are installed, + "fastparquet" will take precedence over "pyarrow".
+ columns: list, optional + List of all column names that may be read from the file. + row_groups : list, optional + List of all row-groups that may be read from the file. This + may be a list of row-group indices (integers), or it may be + a list of `RowGroup` metadata objects (if the "fastparquet" + engine is used). + storage_options : dict, optional + Used to generate an `AbstractFileSystem` object if `fs` was + not specified. + strict : bool, optional + Whether the resulting `KnownPartsOfAFile` cache should + fetch reads that go beyond a known byte-range boundary. + If `False` (the default), any read that ends outside a + known part will be zero padded. Note that using + `strict=True` may be useful for debugging. + max_gap : int, optional + Neighboring byte ranges will only be merged when their + inter-range gap is <= `max_gap`. Default is 64KB. + max_block : int, optional + Neighboring byte ranges will only be merged when the size of + the aggregated range is <= `max_block`. Default is 256MB. + footer_sample_size : int, optional + Number of bytes to read from the end of the path to look + for the footer metadata. If the sampled bytes do not contain + the footer, a second read request will be required, and + performance will suffer. Default is 1MB. + **kwargs : + Optional key-word arguments to pass to `fs.open` + """ + + # Make sure we have an `AbstractFileSystem` object + # to work with + if fs is None: + fs = url_to_fs(path, **(storage_options or {}))[0] + + # For now, `columns == []` not supported. Just use + # default `open` command with `path` input + if columns is not None and len(columns) == 0: + return fs.open(path, mode=mode) + + # Set the engine + engine = _set_engine(engine) + + # Fetch the known byte ranges needed to read + # `columns` and/or `row_groups` + data = _get_parquet_byte_ranges( + [path], + fs, + metadata=metadata, + columns=columns, + row_groups=row_groups, + engine=engine, + max_gap=max_gap, + max_block=max_block, + footer_sample_size=footer_sample_size, + ) + + # Extract file name from `data` + fn = next(iter(data)) if data else path + + # Call self.open with "parts" caching + options = kwargs.pop("cache_options", {}).copy() + return fs.open( + fn, + mode=mode, + cache_type="parts", + cache_options={ + **options, + "data": data.get(fn, {}), + "strict": strict, + }, + **kwargs, + ) + + +def _get_parquet_byte_ranges( + paths, + fs, + metadata=None, + columns=None, + row_groups=None, + max_gap=64_000, + max_block=256_000_000, + footer_sample_size=1_000_000, + engine="auto", +): + """Get a dictionary of the known byte ranges needed + to read a specific column/row-group selection from a + Parquet dataset. Each value in the output dictionary + is intended for use as the `data` argument for the + `KnownPartsOfAFile` caching strategy of a single path. 
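Stepping back to the public entry point documented above, a hedged usage sketch; the S3 URL and column names are hypothetical, and a parquet engine (fastparquet or pyarrow) plus an s3 backend would need to be installed:

    import pandas as pd
    from fsspec.parquet import open_parquet_file

    with open_parquet_file(
        "s3://bucket/data.parquet",   # hypothetical remote file
        columns=["x", "y"],           # only these column chunks are fetched
        engine="auto",
    ) as f:
        df = pd.read_parquet(f, columns=["x", "y"])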
+ """ + + # Set engine if necessary + if isinstance(engine, str): + engine = _set_engine(engine) + + # Pass to specialized function if metadata is defined + if metadata is not None: + # Use the provided parquet metadata object + # to avoid transferring/parsing footer metadata + return _get_parquet_byte_ranges_from_metadata( + metadata, + fs, + engine, + columns=columns, + row_groups=row_groups, + max_gap=max_gap, + max_block=max_block, + ) + + # Get file sizes asynchronously + file_sizes = fs.sizes(paths) + + # Populate global paths, starts, & ends + result = {} + data_paths = [] + data_starts = [] + data_ends = [] + add_header_magic = True + if columns is None and row_groups is None: + # We are NOT selecting specific columns or row-groups. + # + # We can avoid sampling the footers, and just transfer + # all file data with cat_ranges + for i, path in enumerate(paths): + result[path] = {} + for b in range(0, file_sizes[i], max_block): + data_paths.append(path) + data_starts.append(b) + data_ends.append(min(b + max_block, file_sizes[i])) + add_header_magic = False # "Magic" should already be included + else: + # We ARE selecting specific columns or row-groups. + # + # Gather file footers. + # We just take the last `footer_sample_size` bytes of each + # file (or the entire file if it is smaller than that) + footer_starts = [] + footer_ends = [] + for i, path in enumerate(paths): + footer_ends.append(file_sizes[i]) + sample_size = max(0, file_sizes[i] - footer_sample_size) + footer_starts.append(sample_size) + footer_samples = fs.cat_ranges(paths, footer_starts, footer_ends) + + # Check our footer samples and re-sample if necessary. + missing_footer_starts = footer_starts.copy() + large_footer = 0 + for i, path in enumerate(paths): + footer_size = int.from_bytes(footer_samples[i][-8:-4], "little") + real_footer_start = file_sizes[i] - (footer_size + 8) + if real_footer_start < footer_starts[i]: + missing_footer_starts[i] = real_footer_start + large_footer = max(large_footer, (footer_size + 8)) + if large_footer: + warnings.warn( + f"Not enough data was used to sample the parquet footer. " + f"Try setting footer_sample_size >= {large_footer}." + ) + for i, block in enumerate( + fs.cat_ranges( + paths, + missing_footer_starts, + footer_starts, + ) + ): + footer_samples[i] = block + footer_samples[i] + footer_starts[i] = missing_footer_starts[i] + + # Calculate required byte ranges for each path + for i, path in enumerate(paths): + # Deal with small-file case. + # Just include all remaining bytes of the file + # in a single range. 
+ if file_sizes[i] < max_block: + if footer_starts[i] > 0: + # Only need to transfer the data if the + # footer sample isn't already the whole file + data_paths.append(path) + data_starts.append(0) + data_ends.append(footer_starts[i]) + continue + + # Use "engine" to collect data byte ranges + path_data_starts, path_data_ends = engine._parquet_byte_ranges( + columns, + row_groups=row_groups, + footer=footer_samples[i], + footer_start=footer_starts[i], + ) + + data_paths += [path] * len(path_data_starts) + data_starts += path_data_starts + data_ends += path_data_ends + + # Merge adjacent offset ranges + data_paths, data_starts, data_ends = merge_offset_ranges( + data_paths, + data_starts, + data_ends, + max_gap=max_gap, + max_block=max_block, + sort=False, # Should already be sorted + ) + + # Start by populating `result` with footer samples + for i, path in enumerate(paths): + result[path] = {(footer_starts[i], footer_ends[i]): footer_samples[i]} + + # Transfer the data byte-ranges into local memory + _transfer_ranges(fs, result, data_paths, data_starts, data_ends) + + # Add b"PAR1" to header if necessary + if add_header_magic: + _add_header_magic(result) + + return result + + +def _get_parquet_byte_ranges_from_metadata( + metadata, + fs, + engine, + columns=None, + row_groups=None, + max_gap=64_000, + max_block=256_000_000, +): + """Simplified version of `_get_parquet_byte_ranges` for + the case that an engine-specific `metadata` object is + provided, and the remote footer metadata does not need to + be transferred before calculating the required byte ranges. + """ + + # Use "engine" to collect data byte ranges + data_paths, data_starts, data_ends = engine._parquet_byte_ranges( + columns, + row_groups=row_groups, + metadata=metadata, + ) + + # Merge adjacent offset ranges + data_paths, data_starts, data_ends = merge_offset_ranges( + data_paths, + data_starts, + data_ends, + max_gap=max_gap, + max_block=max_block, + sort=False, # Should be sorted + ) + + # Transfer the data byte-ranges into local memory + result = {fn: {} for fn in list(set(data_paths))} + _transfer_ranges(fs, result, data_paths, data_starts, data_ends) + + # Add b"PAR1" to header + _add_header_magic(result) + + return result + + +def _transfer_ranges(fs, blocks, paths, starts, ends): + # Use cat_ranges to gather the data byte_ranges + ranges = (paths, starts, ends) + for path, start, stop, data in zip(*ranges, fs.cat_ranges(*ranges)): + blocks[path][(start, stop)] = data + + +def _add_header_magic(data): + # Add b"PAR1" to file headers + for path in list(data.keys()): + add_magic = True + for k in data[path]: + if k[0] == 0 and k[1] >= 4: + add_magic = False + break + if add_magic: + data[path][(0, 4)] = b"PAR1" + + +def _set_engine(engine_str): + # Define a list of parquet engines to try + if engine_str == "auto": + try_engines = ("fastparquet", "pyarrow") + elif not isinstance(engine_str, str): + raise ValueError( + "Failed to set parquet engine! 
" + "Please pass 'fastparquet', 'pyarrow', or 'auto'" + ) + elif engine_str not in ("fastparquet", "pyarrow"): + raise ValueError(f"{engine_str} engine not supported by `fsspec.parquet`") + else: + try_engines = [engine_str] + + # Try importing the engines in `try_engines`, + # and choose the first one that succeeds + for engine in try_engines: + try: + if engine == "fastparquet": + return FastparquetEngine() + elif engine == "pyarrow": + return PyarrowEngine() + except ImportError: + pass + + # Raise an error if a supported parquet engine + # was not found + raise ImportError( + f"The following parquet engines are not installed " + f"in your python environment: {try_engines}." + f"Please install 'fastparquert' or 'pyarrow' to " + f"utilize the `fsspec.parquet` module." + ) + + +class FastparquetEngine: + # The purpose of the FastparquetEngine class is + # to check if fastparquet can be imported (on initialization) + # and to define a `_parquet_byte_ranges` method. In the + # future, this class may also be used to define other + # methods/logic that are specific to fastparquet. + + def __init__(self): + import fastparquet as fp + + self.fp = fp + + def _row_group_filename(self, row_group, pf): + return pf.row_group_filename(row_group) + + def _parquet_byte_ranges( + self, + columns, + row_groups=None, + metadata=None, + footer=None, + footer_start=None, + ): + # Initialize offset ranges and define ParqetFile metadata + pf = metadata + data_paths, data_starts, data_ends = [], [], [] + if pf is None: + pf = self.fp.ParquetFile(io.BytesIO(footer)) + + # Convert columns to a set and add any index columns + # specified in the pandas metadata (just in case) + column_set = None if columns is None else set(columns) + if column_set is not None and hasattr(pf, "pandas_metadata"): + md_index = [ + ind + for ind in pf.pandas_metadata.get("index_columns", []) + # Ignore RangeIndex information + if not isinstance(ind, dict) + ] + column_set |= set(md_index) + + # Check if row_groups is a list of integers + # or a list of row-group metadata + if row_groups and not isinstance(row_groups[0], int): + # Input row_groups contains row-group metadata + row_group_indices = None + else: + # Input row_groups contains row-group indices + row_group_indices = row_groups + row_groups = pf.row_groups + + # Loop through column chunks to add required byte ranges + for r, row_group in enumerate(row_groups): + # Skip this row-group if we are targeting + # specific row-groups + if row_group_indices is None or r in row_group_indices: + # Find the target parquet-file path for `row_group` + fn = self._row_group_filename(row_group, pf) + + for column in row_group.columns: + name = column.meta_data.path_in_schema[0] + # Skip this column if we are targeting a + # specific columns + if column_set is None or name in column_set: + file_offset0 = column.meta_data.dictionary_page_offset + if file_offset0 is None: + file_offset0 = column.meta_data.data_page_offset + num_bytes = column.meta_data.total_compressed_size + if footer_start is None or file_offset0 < footer_start: + data_paths.append(fn) + data_starts.append(file_offset0) + data_ends.append( + min( + file_offset0 + num_bytes, + footer_start or (file_offset0 + num_bytes), + ) + ) + + if metadata: + # The metadata in this call may map to multiple + # file paths. 
Need to include `data_paths` + return data_paths, data_starts, data_ends + return data_starts, data_ends + + +class PyarrowEngine: + # The purpose of the PyarrowEngine class is + # to check if pyarrow can be imported (on initialization) + # and to define a `_parquet_byte_ranges` method. In the + # future, this class may also be used to define other + # methods/logic that are specific to pyarrow. + + def __init__(self): + import pyarrow.parquet as pq + + self.pq = pq + + def _row_group_filename(self, row_group, metadata): + raise NotImplementedError + + def _parquet_byte_ranges( + self, + columns, + row_groups=None, + metadata=None, + footer=None, + footer_start=None, + ): + if metadata is not None: + raise ValueError("metadata input not supported for PyarrowEngine") + + data_starts, data_ends = [], [] + md = self.pq.ParquetFile(io.BytesIO(footer)).metadata + + # Convert columns to a set and add any index columns + # specified in the pandas metadata (just in case) + column_set = None if columns is None else set(columns) + if column_set is not None: + schema = md.schema.to_arrow_schema() + has_pandas_metadata = ( + schema.metadata is not None and b"pandas" in schema.metadata + ) + if has_pandas_metadata: + md_index = [ + ind + for ind in json.loads( + schema.metadata[b"pandas"].decode("utf8") + ).get("index_columns", []) + # Ignore RangeIndex information + if not isinstance(ind, dict) + ] + column_set |= set(md_index) + + # Loop through column chunks to add required byte ranges + for r in range(md.num_row_groups): + # Skip this row-group if we are targeting + # specific row-groups + if row_groups is None or r in row_groups: + row_group = md.row_group(r) + for c in range(row_group.num_columns): + column = row_group.column(c) + name = column.path_in_schema + # Skip this column if we are targeting a + # specific columns + split_name = name.split(".")[0] + if ( + column_set is None + or name in column_set + or split_name in column_set + ): + file_offset0 = column.dictionary_page_offset + if file_offset0 is None: + file_offset0 = column.data_page_offset + num_bytes = column.total_compressed_size + if file_offset0 < footer_start: + data_starts.append(file_offset0) + data_ends.append( + min(file_offset0 + num_bytes, footer_start) + ) + return data_starts, data_ends diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/registry.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..96ffad7f4a889662c8fb4a006e1c497652992430 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/registry.py @@ -0,0 +1,330 @@ +from __future__ import annotations + +import importlib +import types +import warnings + +__all__ = ["registry", "get_filesystem_class", "default"] + +# internal, mutable +_registry: dict[str, type] = {} + +# external, immutable +registry = types.MappingProxyType(_registry) +default = "file" + + +def register_implementation(name, cls, clobber=False, errtxt=None): + """Add implementation class to the registry + + Parameters + ---------- + name: str + Protocol name to associate with the class + cls: class or str + if a class: fsspec-compliant implementation class (normally inherits from + ``fsspec.AbstractFileSystem``, gets added straight to the registry. 
+        If a str, the full path to an implementation class like
+        package.module.class, which gets added to known_implementations,
+        so the import is deferred until the filesystem is actually used.
+    clobber: bool (optional)
+        Whether to overwrite a protocol with the same name; if False, will raise
+        instead.
+    errtxt: str (optional)
+        If given, a failure to import the given class will result in this
+        text being used as the error message.
+    """
+    if isinstance(cls, str):
+        if name in known_implementations and clobber is False:
+            if cls != known_implementations[name]["class"]:
+                raise ValueError(
+                    f"Name ({name}) already in the known_implementations and clobber "
+                    f"is False"
+                )
+        else:
+            known_implementations[name] = {
+                "class": cls,
+                "err": errtxt or f"{cls} import failed for protocol {name}",
+            }
+
+    else:
+        if name in registry and clobber is False:
+            if _registry[name] is not cls:
+                raise ValueError(
+                    f"Name ({name}) already in the registry and clobber is False"
+                )
+        else:
+            _registry[name] = cls
+
+
+# protocols mapped to the class which implements them. This dict can be
+# updated with register_implementation
+known_implementations = {
+    "abfs": {
+        "class": "adlfs.AzureBlobFileSystem",
+        "err": "Install adlfs to access Azure Datalake Gen2 and Azure Blob Storage",
+    },
+    "adl": {
+        "class": "adlfs.AzureDatalakeFileSystem",
+        "err": "Install adlfs to access Azure Datalake Gen1",
+    },
+    "arrow_hdfs": {
+        "class": "fsspec.implementations.arrow.HadoopFileSystem",
+        "err": "pyarrow and local java libraries required for HDFS",
+    },
+    "asynclocal": {
+        "class": "morefs.asyn_local.AsyncLocalFileSystem",
+        "err": "Install 'morefs[asynclocalfs]' to use AsyncLocalFileSystem",
+    },
+    "asyncwrapper": {
+        "class": "fsspec.implementations.asyn_wrapper.AsyncFileSystemWrapper",
+    },
+    "az": {
+        "class": "adlfs.AzureBlobFileSystem",
+        "err": "Install adlfs to access Azure Datalake Gen2 and Azure Blob Storage",
+    },
+    "blockcache": {"class": "fsspec.implementations.cached.CachingFileSystem"},
+    "box": {
+        "class": "boxfs.BoxFileSystem",
+        "err": "Please install boxfs to access BoxFileSystem",
+    },
+    "cached": {"class": "fsspec.implementations.cached.CachingFileSystem"},
+    "dask": {
+        "class": "fsspec.implementations.dask.DaskWorkerFileSystem",
+        "err": "Install dask distributed to access worker file system",
+    },
+    "data": {"class": "fsspec.implementations.data.DataFileSystem"},
+    "dbfs": {
+        "class": "fsspec.implementations.dbfs.DatabricksFileSystem",
+        "err": "Install the requests package to use the DatabricksFileSystem",
+    },
+    "dir": {"class": "fsspec.implementations.dirfs.DirFileSystem"},
+    "dropbox": {
+        "class": "dropboxdrivefs.DropboxDriveFileSystem",
+        "err": (
+            'DropboxFileSystem requires "dropboxdrivefs", "requests" and '
+            '"dropbox" to be installed'
+        ),
+    },
+    "dvc": {
+        "class": "dvc.api.DVCFileSystem",
+        "err": "Install dvc to access DVCFileSystem",
+    },
+    "file": {"class": "fsspec.implementations.local.LocalFileSystem"},
+    "filecache": {"class": "fsspec.implementations.cached.WholeFileCacheFileSystem"},
+    "ftp": {"class": "fsspec.implementations.ftp.FTPFileSystem"},
+    "gcs": {
+        "class": "gcsfs.GCSFileSystem",
+        "err": "Please install gcsfs to access Google Storage",
+    },
+    "gdrive": {
+        "class": "gdrive_fsspec.GoogleDriveFileSystem",
+        "err": "Please install gdrive_fs for access to Google Drive",
+    },
+    "generic": {"class": "fsspec.generic.GenericFileSystem"},
+    "gist": {
+        "class": "fsspec.implementations.gist.GistFileSystem",
+        "err": "Install the requests package to use the gist FS",
+    },
"git": { + "class": "fsspec.implementations.git.GitFileSystem", + "err": "Install pygit2 to browse local git repos", + }, + "github": { + "class": "fsspec.implementations.github.GithubFileSystem", + "err": "Install the requests package to use the github FS", + }, + "gs": { + "class": "gcsfs.GCSFileSystem", + "err": "Please install gcsfs to access Google Storage", + }, + "hdfs": { + "class": "fsspec.implementations.arrow.HadoopFileSystem", + "err": "pyarrow and local java libraries required for HDFS", + }, + "hf": { + "class": "huggingface_hub.HfFileSystem", + "err": "Install huggingface_hub to access HfFileSystem", + }, + "http": { + "class": "fsspec.implementations.http.HTTPFileSystem", + "err": 'HTTPFileSystem requires "requests" and "aiohttp" to be installed', + }, + "https": { + "class": "fsspec.implementations.http.HTTPFileSystem", + "err": 'HTTPFileSystem requires "requests" and "aiohttp" to be installed', + }, + "jlab": { + "class": "fsspec.implementations.jupyter.JupyterFileSystem", + "err": "Jupyter FS requires requests to be installed", + }, + "jupyter": { + "class": "fsspec.implementations.jupyter.JupyterFileSystem", + "err": "Jupyter FS requires requests to be installed", + }, + "lakefs": { + "class": "lakefs_spec.LakeFSFileSystem", + "err": "Please install lakefs-spec to access LakeFSFileSystem", + }, + "libarchive": { + "class": "fsspec.implementations.libarchive.LibArchiveFileSystem", + "err": "LibArchive requires to be installed", + }, + "local": {"class": "fsspec.implementations.local.LocalFileSystem"}, + "memory": {"class": "fsspec.implementations.memory.MemoryFileSystem"}, + "oci": { + "class": "ocifs.OCIFileSystem", + "err": "Install ocifs to access OCI Object Storage", + }, + "ocilake": { + "class": "ocifs.OCIFileSystem", + "err": "Install ocifs to access OCI Data Lake", + }, + "oss": { + "class": "ossfs.OSSFileSystem", + "err": "Install ossfs to access Alibaba Object Storage System", + }, + "pyscript": { + "class": "pyscript_fsspec_client.client.PyscriptFileSystem", + "err": "Install requests (cpython) or run in pyscript", + }, + "reference": {"class": "fsspec.implementations.reference.ReferenceFileSystem"}, + "root": { + "class": "fsspec_xrootd.XRootDFileSystem", + "err": ( + "Install fsspec-xrootd to access xrootd storage system. 
" + "Note: 'root' is the protocol name for xrootd storage systems, " + "not referring to root directories" + ), + }, + "s3": {"class": "s3fs.S3FileSystem", "err": "Install s3fs to access S3"}, + "s3a": {"class": "s3fs.S3FileSystem", "err": "Install s3fs to access S3"}, + "sftp": { + "class": "fsspec.implementations.sftp.SFTPFileSystem", + "err": 'SFTPFileSystem requires "paramiko" to be installed', + }, + "simplecache": {"class": "fsspec.implementations.cached.SimpleCacheFileSystem"}, + "smb": { + "class": "fsspec.implementations.smb.SMBFileSystem", + "err": 'SMB requires "smbprotocol" or "smbprotocol[kerberos]" installed', + }, + "ssh": { + "class": "fsspec.implementations.sftp.SFTPFileSystem", + "err": 'SFTPFileSystem requires "paramiko" to be installed', + }, + "tar": {"class": "fsspec.implementations.tar.TarFileSystem"}, + "tos": { + "class": "tosfs.TosFileSystem", + "err": "Install tosfs to access ByteDance volcano engine Tinder Object Storage", + }, + "tosfs": { + "class": "tosfs.TosFileSystem", + "err": "Install tosfs to access ByteDance volcano engine Tinder Object Storage", + }, + "wandb": {"class": "wandbfs.WandbFS", "err": "Install wandbfs to access wandb"}, + "webdav": { + "class": "webdav4.fsspec.WebdavFileSystem", + "err": "Install webdav4 to access WebDAV", + }, + "webhdfs": { + "class": "fsspec.implementations.webhdfs.WebHDFS", + "err": 'webHDFS access requires "requests" to be installed', + }, + "zip": {"class": "fsspec.implementations.zip.ZipFileSystem"}, +} + +assert list(known_implementations) == sorted(known_implementations), ( + "Not in alphabetical order" +) + + +def get_filesystem_class(protocol): + """Fetch named protocol implementation from the registry + + The dict ``known_implementations`` maps protocol names to the locations + of classes implementing the corresponding file-system. When used for the + first time, appropriate imports will happen and the class will be placed in + the registry. All subsequent calls will fetch directly from the registry. + + Some protocol implementations require additional dependencies, and so the + import may fail. In this case, the string in the "err" field of the + ``known_implementations`` will be given as the error message. + """ + if not protocol: + protocol = default + + if protocol not in registry: + if protocol not in known_implementations: + raise ValueError(f"Protocol not known: {protocol}") + bit = known_implementations[protocol] + try: + register_implementation(protocol, _import_class(bit["class"])) + except ImportError as e: + raise ImportError(bit.get("err")) from e + cls = registry[protocol] + if getattr(cls, "protocol", None) in ("abstract", None): + cls.protocol = protocol + + return cls + + +s3_msg = """Your installed version of s3fs is very old and known to cause +severe performance issues, see also https://github.com/dask/dask/issues/10276 + +To fix, you should specify a lower version bound on s3fs, or +update the current installation. +""" + + +def _import_class(fqp: str): + """Take a fully-qualified path and return the imported class or identifier. + + ``fqp`` is of the form "package.module.klass" or + "package.module:subobject.klass". + + Warnings + -------- + This can import arbitrary modules. Make sure you haven't installed any modules + that may execute malicious code at import time. 
+ """ + if ":" in fqp: + mod, name = fqp.rsplit(":", 1) + else: + mod, name = fqp.rsplit(".", 1) + + is_s3 = mod == "s3fs" + mod = importlib.import_module(mod) + if is_s3 and mod.__version__.split(".") < ["0", "5"]: + warnings.warn(s3_msg) + for part in name.split("."): + mod = getattr(mod, part) + + if not isinstance(mod, type): + raise TypeError(f"{fqp} is not a class") + + return mod + + +def filesystem(protocol, **storage_options): + """Instantiate filesystems for given protocol and arguments + + ``storage_options`` are specific to the protocol being chosen, and are + passed directly to the class. + """ + if protocol == "arrow_hdfs": + warnings.warn( + "The 'arrow_hdfs' protocol has been deprecated and will be " + "removed in the future. Specify it as 'hdfs'.", + DeprecationWarning, + ) + + cls = get_filesystem_class(protocol) + return cls(**storage_options) + + +def available_protocols(): + """Return a list of the implemented protocols. + + Note that any given protocol may require extra packages to be importable. + """ + return list(known_implementations) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/spec.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/spec.py new file mode 100644 index 0000000000000000000000000000000000000000..5f6f9a10441c12ef8db4870c4e5dc72262037c6e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/spec.py @@ -0,0 +1,2270 @@ +from __future__ import annotations + +import io +import json +import logging +import os +import threading +import warnings +import weakref +from errno import ESPIPE +from glob import has_magic +from hashlib import sha256 +from typing import Any, ClassVar + +from .callbacks import DEFAULT_CALLBACK +from .config import apply_config, conf +from .dircache import DirCache +from .transaction import Transaction +from .utils import ( + _unstrip_protocol, + glob_translate, + isfilelike, + other_paths, + read_block, + stringify_path, + tokenize, +) + +logger = logging.getLogger("fsspec") + + +def make_instance(cls, args, kwargs): + return cls(*args, **kwargs) + + +class _Cached(type): + """ + Metaclass for caching file system instances. + + Notes + ----- + Instances are cached according to + + * The values of the class attributes listed in `_extra_tokenize_attributes` + * The arguments passed to ``__init__``. + + This creates an additional reference to the filesystem, which prevents the + filesystem from being garbage collected when all *user* references go away. + A call to the :meth:`AbstractFileSystem.clear_instance_cache` must *also* + be made for a filesystem instance to be garbage collected. + """ + + def __init__(cls, *args, **kwargs): + super().__init__(*args, **kwargs) + # Note: we intentionally create a reference here, to avoid garbage + # collecting instances when all other references are gone. To really + # delete a FileSystem, the cache must be cleared. 
+ if conf.get("weakref_instance_cache"): # pragma: no cover + # debug option for analysing fork/spawn conditions + cls._cache = weakref.WeakValueDictionary() + else: + cls._cache = {} + cls._pid = os.getpid() + + def __call__(cls, *args, **kwargs): + kwargs = apply_config(cls, kwargs) + extra_tokens = tuple( + getattr(cls, attr, None) for attr in cls._extra_tokenize_attributes + ) + token = tokenize( + cls, cls._pid, threading.get_ident(), *args, *extra_tokens, **kwargs + ) + skip = kwargs.pop("skip_instance_cache", False) + if os.getpid() != cls._pid: + cls._cache.clear() + cls._pid = os.getpid() + if not skip and cls.cachable and token in cls._cache: + cls._latest = token + return cls._cache[token] + else: + obj = super().__call__(*args, **kwargs) + # Setting _fs_token here causes some static linters to complain. + obj._fs_token_ = token + obj.storage_args = args + obj.storage_options = kwargs + if obj.async_impl and obj.mirror_sync_methods: + from .asyn import mirror_sync_methods + + mirror_sync_methods(obj) + + if cls.cachable and not skip: + cls._latest = token + cls._cache[token] = obj + return obj + + +class AbstractFileSystem(metaclass=_Cached): + """ + An abstract super-class for pythonic file-systems + + Implementations are expected to be compatible with or, better, subclass + from here. + """ + + cachable = True # this class can be cached, instances reused + _cached = False + blocksize = 2**22 + sep = "/" + protocol: ClassVar[str | tuple[str, ...]] = "abstract" + _latest = None + async_impl = False + mirror_sync_methods = False + root_marker = "" # For some FSs, may require leading '/' or other character + transaction_type = Transaction + + #: Extra *class attributes* that should be considered when hashing. + _extra_tokenize_attributes = () + + # Set by _Cached metaclass + storage_args: tuple[Any, ...] + storage_options: dict[str, Any] + + def __init__(self, *args, **storage_options): + """Create and configure file-system instance + + Instances may be cachable, so if similar enough arguments are seen + a new instance is not required. The token attribute exists to allow + implementations to cache instances if they wish. + + A reasonable default should be provided if there are no arguments. + + Subclasses should call this method. + + Parameters + ---------- + use_listings_cache, listings_expiry_time, max_paths: + passed to ``DirCache``, if the implementation supports + directory listing caching. Pass use_listings_cache=False + to disable such caching. + skip_instance_cache: bool + If this is a cachable implementation, pass True here to force + creating a new instance even if a matching instance exists, and prevent + storing this instance. + asynchronous: bool + loop: asyncio-compatible IOLoop or None + """ + if self._cached: + # reusing instance, don't change + return + self._cached = True + self._intrans = False + self._transaction = None + self._invalidated_caches_in_transaction = [] + self.dircache = DirCache(**storage_options) + + if storage_options.pop("add_docs", None): + warnings.warn("add_docs is no longer supported.", FutureWarning) + + if storage_options.pop("add_aliases", None): + warnings.warn("add_aliases has been removed.", FutureWarning) + # This is set in _Cached + self._fs_token_ = None + + @property + def fsid(self): + """Persistent filesystem id that can be used to compare filesystems + across sessions. 
+ """ + raise NotImplementedError + + @property + def _fs_token(self): + return self._fs_token_ + + def __dask_tokenize__(self): + return self._fs_token + + def __hash__(self): + return int(self._fs_token, 16) + + def __eq__(self, other): + return isinstance(other, type(self)) and self._fs_token == other._fs_token + + def __reduce__(self): + return make_instance, (type(self), self.storage_args, self.storage_options) + + @classmethod + def _strip_protocol(cls, path): + """Turn path from fully-qualified to file-system-specific + + May require FS-specific handling, e.g., for relative paths or links. + """ + if isinstance(path, list): + return [cls._strip_protocol(p) for p in path] + path = stringify_path(path) + protos = (cls.protocol,) if isinstance(cls.protocol, str) else cls.protocol + for protocol in protos: + if path.startswith(protocol + "://"): + path = path[len(protocol) + 3 :] + elif path.startswith(protocol + "::"): + path = path[len(protocol) + 2 :] + path = path.rstrip("/") + # use of root_marker to make minimum required path, e.g., "/" + return path or cls.root_marker + + def unstrip_protocol(self, name: str) -> str: + """Format FS-specific path to generic, including protocol""" + protos = (self.protocol,) if isinstance(self.protocol, str) else self.protocol + for protocol in protos: + if name.startswith(f"{protocol}://"): + return name + return f"{protos[0]}://{name}" + + @staticmethod + def _get_kwargs_from_urls(path): + """If kwargs can be encoded in the paths, extract them here + + This should happen before instantiation of the class; incoming paths + then should be amended to strip the options in methods. + + Examples may look like an sftp path "sftp://user@host:/my/path", where + the user and host should become kwargs and later get stripped. + """ + # by default, nothing happens + return {} + + @classmethod + def current(cls): + """Return the most recently instantiated FileSystem + + If no instance has been created, then create one with defaults + """ + if cls._latest in cls._cache: + return cls._cache[cls._latest] + return cls() + + @property + def transaction(self): + """A context within which files are committed together upon exit + + Requires the file class to implement `.commit()` and `.discard()` + for the normal and exception cases. + """ + if self._transaction is None: + self._transaction = self.transaction_type(self) + return self._transaction + + def start_transaction(self): + """Begin write transaction for deferring files, non-context version""" + self._intrans = True + self._transaction = self.transaction_type(self) + return self.transaction + + def end_transaction(self): + """Finish write transaction, non-context version""" + self.transaction.complete() + self._transaction = None + # The invalid cache must be cleared after the transaction is completed. + for path in self._invalidated_caches_in_transaction: + self.invalidate_cache(path) + self._invalidated_caches_in_transaction.clear() + + def invalidate_cache(self, path=None): + """ + Discard any cached directory information + + Parameters + ---------- + path: string or None + If None, clear all listings cached else listings at or under given + path. + """ + # Not necessary to implement invalidation mechanism, may have no cache. + # But if have, you should call this method of parent class from your + # subclass to ensure expiring caches after transacations correctly. 
+        # See the implementation of FTPFileSystem in ftp.py
+        if self._intrans:
+            self._invalidated_caches_in_transaction.append(path)
+
+    def mkdir(self, path, create_parents=True, **kwargs):
+        """
+        Create directory entry at path
+
+        For systems that don't have true directories, this may create an
+        entry for this instance only and not touch the real filesystem
+
+        Parameters
+        ----------
+        path: str
+            location
+        create_parents: bool
+            if True, this is equivalent to ``makedirs``
+        kwargs:
+            may be permissions, etc.
+        """
+        pass  # not necessary to implement, may not have directories
+
+    def makedirs(self, path, exist_ok=False):
+        """Recursively make directories
+
+        Creates directory at path and any intervening required directories.
+        Raises exception if, for instance, the path already exists but is a
+        file.
+
+        Parameters
+        ----------
+        path: str
+            leaf directory name
+        exist_ok: bool (False)
+            If False, will error if the target already exists
+        """
+        pass  # not necessary to implement, may not have directories
+
+    def rmdir(self, path):
+        """Remove a directory, if empty"""
+        pass  # not necessary to implement, may not have directories
+
+    def ls(self, path, detail=True, **kwargs):
+        """List objects at path.
+
+        This should include subdirectories and files at that location. The
+        difference between a file and a directory must be clear when details
+        are requested.
+
+        The specific keys, or perhaps a FileInfo class, or similar, is TBD,
+        but must be consistent across implementations.
+        Must include:
+
+        - full path to the entry (without protocol)
+        - size of the entry, in bytes. If the value cannot be determined, will
+          be ``None``.
+        - type of entry, "file", "directory" or other
+
+        Additional information
+        may be present, appropriate to the file-system, e.g., generation,
+        checksum, etc.
+
+        May use refresh=True|False to allow use of self._ls_from_cache to
+        check for a saved listing and avoid calling the backend. This would be
+        common where listing may be expensive.
+
+        Parameters
+        ----------
+        path: str
+        detail: bool
+            if True, gives a list of dictionaries, where each is the same as
+            the result of ``info(path)``. If False, gives a list of paths
+            (str).
+        kwargs: may have additional backend-specific options, such as version
+            information
+
+        Returns
+        -------
+        List of strings if detail is False, or list of directory information
+        dicts if detail is True.
+        """
+        raise NotImplementedError
+
+    def _ls_from_cache(self, path):
+        """Check cache for listing
+
+        Returns listing, if found (may be an empty list for a directory that
+        exists but contains nothing), None if not in cache.
+        """
+        parent = self._parent(path)
+        try:
+            return self.dircache[path.rstrip("/")]
+        except KeyError:
+            pass
+        try:
+            files = [
+                f
+                for f in self.dircache[parent]
+                if f["name"] == path
+                or (f["name"] == path.rstrip("/") and f["type"] == "directory")
+            ]
+            if len(files) == 0:
+                # parent dir was listed but did not contain this file
+                raise FileNotFoundError(path)
+            return files
+        except KeyError:
+            pass
+
+    def walk(self, path, maxdepth=None, topdown=True, on_error="omit", **kwargs):
+        """Return all files under the given path.
+
+        List all files, recursing into subdirectories; output is iterator-style,
+        like ``os.walk()``. For a simple list of files, ``find()`` is available.
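+
+        For example, a minimal sketch (directory names are illustrative):
+
+        >>> for root, dirs, files in fs.walk("data", maxdepth=2):  # doctest: +SKIP
+        ...     print(root, dirs, files)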
+ + When topdown is True, the caller can modify the dirnames list in-place (perhaps + using del or slice assignment), and walk() will + only recurse into the subdirectories whose names remain in dirnames; + this can be used to prune the search, impose a specific order of visiting, + or even to inform walk() about directories the caller creates or renames before + it resumes walk() again. + Modifying dirnames when topdown is False has no effect. (see os.walk) + + Note that the "files" outputted will include anything that is not + a directory, such as links. + + Parameters + ---------- + path: str + Root to recurse into + maxdepth: int + Maximum recursion depth. None means limitless, but not recommended + on link-based file-systems. + topdown: bool (True) + Whether to walk the directory tree from the top downwards or from + the bottom upwards. + on_error: "omit", "raise", a callable + if omit (default), path with exception will simply be empty; + If raise, an underlying exception will be raised; + if callable, it will be called with a single OSError instance as argument + kwargs: passed to ``ls`` + """ + if maxdepth is not None and maxdepth < 1: + raise ValueError("maxdepth must be at least 1") + + path = self._strip_protocol(path) + full_dirs = {} + dirs = {} + files = {} + + detail = kwargs.pop("detail", False) + try: + listing = self.ls(path, detail=True, **kwargs) + except (FileNotFoundError, OSError) as e: + if on_error == "raise": + raise + if callable(on_error): + on_error(e) + return + + for info in listing: + # each info name must be at least [path]/part , but here + # we check also for names like [path]/part/ + pathname = info["name"].rstrip("/") + name = pathname.rsplit("/", 1)[-1] + if info["type"] == "directory" and pathname != path: + # do not include "self" path + full_dirs[name] = pathname + dirs[name] = info + elif pathname == path: + # file-like with same name as give path + files[""] = info + else: + files[name] = info + + if not detail: + dirs = list(dirs) + files = list(files) + + if topdown: + # Yield before recursion if walking top down + yield path, dirs, files + + if maxdepth is not None: + maxdepth -= 1 + if maxdepth < 1: + if not topdown: + yield path, dirs, files + return + + for d in dirs: + yield from self.walk( + full_dirs[d], + maxdepth=maxdepth, + detail=detail, + topdown=topdown, + **kwargs, + ) + + if not topdown: + # Yield after recursion if walking bottom up + yield path, dirs, files + + def find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs): + """List all files below path. + + Like posix ``find`` command without conditions + + Parameters + ---------- + path : str + maxdepth: int or None + If not None, the maximum number of levels to descend + withdirs: bool + Whether to include directory paths in the output. This is True + when used by glob, but users usually only want files. + kwargs are passed to ``ls``. 
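+
+        Examples
+        --------
+        A minimal sketch (paths are illustrative; results depend on the
+        target filesystem):
+
+        >>> fs.find("data", maxdepth=1)  # doctest: +SKIP
+        ['data/part.0.parquet', 'data/part.1.parquet']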
+ """ + # TODO: allow equivalent of -name parameter + path = self._strip_protocol(path) + out = {} + + # Add the root directory if withdirs is requested + # This is needed for posix glob compliance + if withdirs and path != "" and self.isdir(path): + out[path] = self.info(path) + + for _, dirs, files in self.walk(path, maxdepth, detail=True, **kwargs): + if withdirs: + files.update(dirs) + out.update({info["name"]: info for name, info in files.items()}) + if not out and self.isfile(path): + # walk works on directories, but find should also return [path] + # when path happens to be a file + out[path] = {} + names = sorted(out) + if not detail: + return names + else: + return {name: out[name] for name in names} + + def du(self, path, total=True, maxdepth=None, withdirs=False, **kwargs): + """Space used by files and optionally directories within a path + + Directory size does not include the size of its contents. + + Parameters + ---------- + path: str + total: bool + Whether to sum all the file sizes + maxdepth: int or None + Maximum number of directory levels to descend, None for unlimited. + withdirs: bool + Whether to include directory paths in the output. + kwargs: passed to ``find`` + + Returns + ------- + Dict of {path: size} if total=False, or int otherwise, where numbers + refer to bytes used. + """ + sizes = {} + if withdirs and self.isdir(path): + # Include top-level directory in output + info = self.info(path) + sizes[info["name"]] = info["size"] + for f in self.find(path, maxdepth=maxdepth, withdirs=withdirs, **kwargs): + info = self.info(f) + sizes[info["name"]] = info["size"] + if total: + return sum(sizes.values()) + else: + return sizes + + def glob(self, path, maxdepth=None, **kwargs): + """Find files by glob-matching. + + Pattern matching capabilities for finding files that match the given pattern. + + Parameters + ---------- + path: str + The glob pattern to match against + maxdepth: int or None + Maximum depth for ``'**'`` patterns. Applied on the first ``'**'`` found. + Must be at least 1 if provided. 
+ kwargs: + Additional arguments passed to ``find`` (e.g., detail=True) + + Returns + ------- + List of matched paths, or dict of paths and their info if detail=True + + Notes + ----- + Supported patterns: + - '*': Matches any sequence of characters within a single directory level + - ``'**'``: Matches any number of directory levels (must be an entire path component) + - '?': Matches exactly one character + - '[abc]': Matches any character in the set + - '[a-z]': Matches any character in the range + - '[!abc]': Matches any character NOT in the set + + Special behaviors: + - If the path ends with '/', only folders are returned + - Consecutive '*' characters are compressed into a single '*' + - Empty brackets '[]' never match anything + - Negated empty brackets '[!]' match any single character + - Special characters in character classes are escaped properly + + Limitations: + - ``'**'`` must be a complete path component (e.g., ``'a/**/b'``, not ``'a**b'``) + - No brace expansion ('{a,b}.txt') + - No extended glob patterns ('+(pattern)', '!(pattern)') + """ + if maxdepth is not None and maxdepth < 1: + raise ValueError("maxdepth must be at least 1") + + import re + + seps = (os.path.sep, os.path.altsep) if os.path.altsep else (os.path.sep,) + ends_with_sep = path.endswith(seps) # _strip_protocol strips trailing slash + path = self._strip_protocol(path) + append_slash_to_dirname = ends_with_sep or path.endswith( + tuple(sep + "**" for sep in seps) + ) + idx_star = path.find("*") if path.find("*") >= 0 else len(path) + idx_qmark = path.find("?") if path.find("?") >= 0 else len(path) + idx_brace = path.find("[") if path.find("[") >= 0 else len(path) + + min_idx = min(idx_star, idx_qmark, idx_brace) + + detail = kwargs.pop("detail", False) + + if not has_magic(path): + if self.exists(path, **kwargs): + if not detail: + return [path] + else: + return {path: self.info(path, **kwargs)} + else: + if not detail: + return [] # glob of non-existent returns empty + else: + return {} + elif "/" in path[:min_idx]: + min_idx = path[:min_idx].rindex("/") + root = path[: min_idx + 1] + depth = path[min_idx + 1 :].count("/") + 1 + else: + root = "" + depth = path[min_idx + 1 :].count("/") + 1 + + if "**" in path: + if maxdepth is not None: + idx_double_stars = path.find("**") + depth_double_stars = path[idx_double_stars:].count("/") + 1 + depth = depth - depth_double_stars + maxdepth + else: + depth = None + + allpaths = self.find(root, maxdepth=depth, withdirs=True, detail=True, **kwargs) + + pattern = glob_translate(path + ("/" if ends_with_sep else "")) + pattern = re.compile(pattern) + + out = { + p: info + for p, info in sorted(allpaths.items()) + if pattern.match( + p + "/" + if append_slash_to_dirname and info["type"] == "directory" + else p + ) + } + + if detail: + return out + else: + return list(out) + + def exists(self, path, **kwargs): + """Is there a file at the given path""" + try: + self.info(path, **kwargs) + return True + except: # noqa: E722 + # any exception allowed bar FileNotFoundError? + return False + + def lexists(self, path, **kwargs): + """If there is a file at the given path (including + broken links)""" + return self.exists(path) + + def info(self, path, **kwargs): + """Give details of entry at path + + Returns a single dictionary, with exactly the same information as ``ls`` + would with ``detail=True``. + + The default implementation calls ls and could be overridden by a + shortcut. kwargs are passed on to ```ls()``. 
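+
+        For example (illustrative; keys beyond name, size and type vary by
+        backend):
+
+        >>> fs.info("data/part.0.parquet")  # doctest: +SKIP
+        {'name': 'data/part.0.parquet', 'size': 1024, 'type': 'file'}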
+ + Some file systems might not be able to measure the file's size, in + which case, the returned dict will include ``'size': None``. + + Returns + ------- + dict with keys: name (full path in the FS), size (in bytes), type (file, + directory, or something else) and other FS-specific keys. + """ + path = self._strip_protocol(path) + out = self.ls(self._parent(path), detail=True, **kwargs) + out = [o for o in out if o["name"].rstrip("/") == path] + if out: + return out[0] + out = self.ls(path, detail=True, **kwargs) + path = path.rstrip("/") + out1 = [o for o in out if o["name"].rstrip("/") == path] + if len(out1) == 1: + if "size" not in out1[0]: + out1[0]["size"] = None + return out1[0] + elif len(out1) > 1 or out: + return {"name": path, "size": 0, "type": "directory"} + else: + raise FileNotFoundError(path) + + def checksum(self, path): + """Unique value for current version of file + + If the checksum is the same from one moment to another, the contents + are guaranteed to be the same. If the checksum changes, the contents + *might* have changed. + + This should normally be overridden; default will probably capture + creation/modification timestamp (which would be good) or maybe + access timestamp (which would be bad) + """ + return int(tokenize(self.info(path)), 16) + + def size(self, path): + """Size in bytes of file""" + return self.info(path).get("size", None) + + def sizes(self, paths): + """Size in bytes of each file in a list of paths""" + return [self.size(p) for p in paths] + + def isdir(self, path): + """Is this entry directory-like?""" + try: + return self.info(path)["type"] == "directory" + except OSError: + return False + + def isfile(self, path): + """Is this entry file-like?""" + try: + return self.info(path)["type"] == "file" + except: # noqa: E722 + return False + + def read_text(self, path, encoding=None, errors=None, newline=None, **kwargs): + """Get the contents of the file as a string. + + Parameters + ---------- + path: str + URL of file on this filesystems + encoding, errors, newline: same as `open`. + """ + with self.open( + path, + mode="r", + encoding=encoding, + errors=errors, + newline=newline, + **kwargs, + ) as f: + return f.read() + + def write_text( + self, path, value, encoding=None, errors=None, newline=None, **kwargs + ): + """Write the text to the given file. + + An existing file will be overwritten. + + Parameters + ---------- + path: str + URL of file on this filesystems + value: str + Text to write. + encoding, errors, newline: same as `open`. + """ + with self.open( + path, + mode="w", + encoding=encoding, + errors=errors, + newline=newline, + **kwargs, + ) as f: + return f.write(value) + + def cat_file(self, path, start=None, end=None, **kwargs): + """Get the content of a file + + Parameters + ---------- + path: URL of file on this filesystems + start, end: int + Bytes limits of the read. If negative, backwards from end, + like usual python slices. Either can be None for start or + end of file, respectively + kwargs: passed to ``open()``. + """ + # explicitly set buffering off? 
+        with self.open(path, "rb", **kwargs) as f:
+            if start is not None:
+                if start >= 0:
+                    f.seek(start)
+                else:
+                    f.seek(max(0, f.size + start))
+            if end is not None:
+                if end < 0:
+                    end = f.size + end
+                return f.read(end - f.tell())
+            return f.read()
+
+    def pipe_file(self, path, value, mode="overwrite", **kwargs):
+        """Set the bytes of given file"""
+        if mode == "create" and self.exists(path):
+            # non-atomic but simple way; or could use "xb" in open(), which is likely
+            # not as well supported
+            raise FileExistsError
+        with self.open(path, "wb", **kwargs) as f:
+            f.write(value)
+
+    def pipe(self, path, value=None, **kwargs):
+        """Put value into path
+
+        (counterpart to ``cat``)
+
+        Parameters
+        ----------
+        path: string or dict(str, bytes)
+            If a string, a single remote location to put ``value`` bytes; if a dict,
+            a mapping of {path: bytesvalue}.
+        value: bytes, optional
+            If using a single path, these are the bytes to put there. Ignored if
+            ``path`` is a dict
+        """
+        if isinstance(path, str):
+            self.pipe_file(self._strip_protocol(path), value, **kwargs)
+        elif isinstance(path, dict):
+            for k, v in path.items():
+                self.pipe_file(self._strip_protocol(k), v, **kwargs)
+        else:
+            raise ValueError("path must be str or dict")
+
+    def cat_ranges(
+        self, paths, starts, ends, max_gap=None, on_error="return", **kwargs
+    ):
+        """Get the contents of byte ranges from one or more files
+
+        Parameters
+        ----------
+        paths: list
+            A list of filepaths on this filesystem
+        starts, ends: int or list
+            Byte limits of the read. If using a single int, the same value will be
+            used to read all the specified files.
+        """
+        if max_gap is not None:
+            raise NotImplementedError
+        if not isinstance(paths, list):
+            raise TypeError
+        if not isinstance(starts, list):
+            starts = [starts] * len(paths)
+        if not isinstance(ends, list):
+            ends = [ends] * len(paths)
+        if len(starts) != len(paths) or len(ends) != len(paths):
+            raise ValueError
+        out = []
+        for p, s, e in zip(paths, starts, ends):
+            try:
+                out.append(self.cat_file(p, s, e))
+            except Exception as e:
+                if on_error == "return":
+                    out.append(e)
+                else:
+                    raise
+        return out
+
+    def cat(self, path, recursive=False, on_error="raise", **kwargs):
+        """Fetch (potentially multiple) paths' contents
+
+        Parameters
+        ----------
+        recursive: bool
+            If True, assume the path(s) are directories, and get all the
+            contained files
+        on_error : "raise", "omit", "return"
+            If raise, an underlying exception will be raised (converted to KeyError
+            if the type is in self.missing_exceptions); if omit, keys with exception
+            will simply not be included in the output; if "return", all keys are
+            included in the output, but the value will be bytes or an exception
+            instance.
+ kwargs: passed to cat_file + + Returns + ------- + dict of {path: contents} if there are multiple paths + or the path has been otherwise expanded + """ + paths = self.expand_path(path, recursive=recursive) + if ( + len(paths) > 1 + or isinstance(path, list) + or paths[0] != self._strip_protocol(path) + ): + out = {} + for path in paths: + try: + out[path] = self.cat_file(path, **kwargs) + except Exception as e: + if on_error == "raise": + raise + if on_error == "return": + out[path] = e + return out + else: + return self.cat_file(paths[0], **kwargs) + + def get_file(self, rpath, lpath, callback=DEFAULT_CALLBACK, outfile=None, **kwargs): + """Copy single remote file to local""" + from .implementations.local import LocalFileSystem + + if isfilelike(lpath): + outfile = lpath + elif self.isdir(rpath): + os.makedirs(lpath, exist_ok=True) + return None + + fs = LocalFileSystem(auto_mkdir=True) + fs.makedirs(fs._parent(lpath), exist_ok=True) + + with self.open(rpath, "rb", **kwargs) as f1: + if outfile is None: + outfile = open(lpath, "wb") + + try: + callback.set_size(getattr(f1, "size", None)) + data = True + while data: + data = f1.read(self.blocksize) + segment_len = outfile.write(data) + if segment_len is None: + segment_len = len(data) + callback.relative_update(segment_len) + finally: + if not isfilelike(lpath): + outfile.close() + + def get( + self, + rpath, + lpath, + recursive=False, + callback=DEFAULT_CALLBACK, + maxdepth=None, + **kwargs, + ): + """Copy file(s) to local. + + Copies a specific file or tree of files (if recursive=True). If lpath + ends with a "/", it will be assumed to be a directory, and target files + will go within. Can submit a list of paths, which may be glob-patterns + and will be expanded. + + Calls get_file for each source. 
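+
+        Examples
+        --------
+        A minimal sketch (both paths are illustrative):
+
+        >>> fs.get("remote/data/", "local/data/", recursive=True)  # doctest: +SKIP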
+ """ + if isinstance(lpath, list) and isinstance(rpath, list): + # No need to expand paths when both source and destination + # are provided as lists + rpaths = rpath + lpaths = lpath + else: + from .implementations.local import ( + LocalFileSystem, + make_path_posix, + trailing_sep, + ) + + source_is_str = isinstance(rpath, str) + rpaths = self.expand_path(rpath, recursive=recursive, maxdepth=maxdepth) + if source_is_str and (not recursive or maxdepth is not None): + # Non-recursive glob does not copy directories + rpaths = [p for p in rpaths if not (trailing_sep(p) or self.isdir(p))] + if not rpaths: + return + + if isinstance(lpath, str): + lpath = make_path_posix(lpath) + + source_is_file = len(rpaths) == 1 + dest_is_dir = isinstance(lpath, str) and ( + trailing_sep(lpath) or LocalFileSystem().isdir(lpath) + ) + + exists = source_is_str and ( + (has_magic(rpath) and source_is_file) + or (not has_magic(rpath) and dest_is_dir and not trailing_sep(rpath)) + ) + lpaths = other_paths( + rpaths, + lpath, + exists=exists, + flatten=not source_is_str, + ) + + callback.set_size(len(lpaths)) + for lpath, rpath in callback.wrap(zip(lpaths, rpaths)): + with callback.branched(rpath, lpath) as child: + self.get_file(rpath, lpath, callback=child, **kwargs) + + def put_file( + self, lpath, rpath, callback=DEFAULT_CALLBACK, mode="overwrite", **kwargs + ): + """Copy single file to remote""" + if mode == "create" and self.exists(rpath): + raise FileExistsError + if os.path.isdir(lpath): + self.makedirs(rpath, exist_ok=True) + return None + + with open(lpath, "rb") as f1: + size = f1.seek(0, 2) + callback.set_size(size) + f1.seek(0) + + self.mkdirs(self._parent(os.fspath(rpath)), exist_ok=True) + with self.open(rpath, "wb", **kwargs) as f2: + while f1.tell() < size: + data = f1.read(self.blocksize) + segment_len = f2.write(data) + if segment_len is None: + segment_len = len(data) + callback.relative_update(segment_len) + + def put( + self, + lpath, + rpath, + recursive=False, + callback=DEFAULT_CALLBACK, + maxdepth=None, + **kwargs, + ): + """Copy file(s) from local. + + Copies a specific file or tree of files (if recursive=True). If rpath + ends with a "/", it will be assumed to be a directory, and target files + will go within. + + Calls put_file for each source. 
+ """ + if isinstance(lpath, list) and isinstance(rpath, list): + # No need to expand paths when both source and destination + # are provided as lists + rpaths = rpath + lpaths = lpath + else: + from .implementations.local import ( + LocalFileSystem, + make_path_posix, + trailing_sep, + ) + + source_is_str = isinstance(lpath, str) + if source_is_str: + lpath = make_path_posix(lpath) + fs = LocalFileSystem() + lpaths = fs.expand_path(lpath, recursive=recursive, maxdepth=maxdepth) + if source_is_str and (not recursive or maxdepth is not None): + # Non-recursive glob does not copy directories + lpaths = [p for p in lpaths if not (trailing_sep(p) or fs.isdir(p))] + if not lpaths: + return + + source_is_file = len(lpaths) == 1 + dest_is_dir = isinstance(rpath, str) and ( + trailing_sep(rpath) or self.isdir(rpath) + ) + + rpath = ( + self._strip_protocol(rpath) + if isinstance(rpath, str) + else [self._strip_protocol(p) for p in rpath] + ) + exists = source_is_str and ( + (has_magic(lpath) and source_is_file) + or (not has_magic(lpath) and dest_is_dir and not trailing_sep(lpath)) + ) + rpaths = other_paths( + lpaths, + rpath, + exists=exists, + flatten=not source_is_str, + ) + + callback.set_size(len(rpaths)) + for lpath, rpath in callback.wrap(zip(lpaths, rpaths)): + with callback.branched(lpath, rpath) as child: + self.put_file(lpath, rpath, callback=child, **kwargs) + + def head(self, path, size=1024): + """Get the first ``size`` bytes from file""" + with self.open(path, "rb") as f: + return f.read(size) + + def tail(self, path, size=1024): + """Get the last ``size`` bytes from file""" + with self.open(path, "rb") as f: + f.seek(max(-size, -f.size), 2) + return f.read() + + def cp_file(self, path1, path2, **kwargs): + raise NotImplementedError + + def copy( + self, path1, path2, recursive=False, maxdepth=None, on_error=None, **kwargs + ): + """Copy within two locations in the filesystem + + on_error : "raise", "ignore" + If raise, any not-found exceptions will be raised; if ignore any + not-found exceptions will cause the path to be skipped; defaults to + raise unless recursive is true, where the default is ignore + """ + if on_error is None and recursive: + on_error = "ignore" + elif on_error is None: + on_error = "raise" + + if isinstance(path1, list) and isinstance(path2, list): + # No need to expand paths when both source and destination + # are provided as lists + paths1 = path1 + paths2 = path2 + else: + from .implementations.local import trailing_sep + + source_is_str = isinstance(path1, str) + paths1 = self.expand_path(path1, recursive=recursive, maxdepth=maxdepth) + if source_is_str and (not recursive or maxdepth is not None): + # Non-recursive glob does not copy directories + paths1 = [p for p in paths1 if not (trailing_sep(p) or self.isdir(p))] + if not paths1: + return + + source_is_file = len(paths1) == 1 + dest_is_dir = isinstance(path2, str) and ( + trailing_sep(path2) or self.isdir(path2) + ) + + exists = source_is_str and ( + (has_magic(path1) and source_is_file) + or (not has_magic(path1) and dest_is_dir and not trailing_sep(path1)) + ) + paths2 = other_paths( + paths1, + path2, + exists=exists, + flatten=not source_is_str, + ) + + for p1, p2 in zip(paths1, paths2): + try: + self.cp_file(p1, p2, **kwargs) + except FileNotFoundError: + if on_error == "raise": + raise + + def expand_path(self, path, recursive=False, maxdepth=None, **kwargs): + """Turn one or more globs or directories into a list of all matching paths + to files or directories. 
+
+        kwargs are passed to ``glob`` or ``find``, which may in turn call ``ls``
+        """
+
+        if maxdepth is not None and maxdepth < 1:
+            raise ValueError("maxdepth must be at least 1")
+
+        if isinstance(path, (str, os.PathLike)):
+            out = self.expand_path([path], recursive, maxdepth)
+        else:
+            out = set()
+            path = [self._strip_protocol(p) for p in path]
+            for p in path:
+                if has_magic(p):
+                    bit = set(self.glob(p, maxdepth=maxdepth, **kwargs))
+                    out |= bit
+                    if recursive:
+                        # glob call above expanded one depth so if maxdepth is defined
+                        # then decrement it in expand_path call below. If it is zero
+                        # after decrementing then avoid expand_path call.
+                        if maxdepth is not None and maxdepth <= 1:
+                            continue
+                        out |= set(
+                            self.expand_path(
+                                list(bit),
+                                recursive=recursive,
+                                maxdepth=maxdepth - 1 if maxdepth is not None else None,
+                                **kwargs,
+                            )
+                        )
+                    continue
+                elif recursive:
+                    rec = set(
+                        self.find(
+                            p, maxdepth=maxdepth, withdirs=True, detail=False, **kwargs
+                        )
+                    )
+                    out |= rec
+                if p not in out and (recursive is False or self.exists(p)):
+                    # should only check once, for the root
+                    out.add(p)
+        if not out:
+            raise FileNotFoundError(path)
+        return sorted(out)
+
+    def mv(self, path1, path2, recursive=False, maxdepth=None, **kwargs):
+        """Move file(s) from one location to another"""
+        if path1 == path2:
+            logger.debug("%s mv: The paths are the same, so no files were moved.", self)
+        else:
+            # explicitly raise exception to prevent data corruption
+            self.copy(
+                path1, path2, recursive=recursive, maxdepth=maxdepth, on_error="raise"
+            )
+            self.rm(path1, recursive=recursive)
+
+    def rm_file(self, path):
+        """Delete a file"""
+        self._rm(path)
+
+    def _rm(self, path):
+        """Delete one file"""
+        # this is the old name for the method, prefer rm_file
+        raise NotImplementedError
+
+    def rm(self, path, recursive=False, maxdepth=None):
+        """Delete files.
+
+        Parameters
+        ----------
+        path: str or list of str
+            File(s) to delete.
+        recursive: bool
+            If file(s) are directories, recursively delete contents and then
+            also remove the directory
+        maxdepth: int or None
+            Depth to pass to walk for finding files to delete, if recursive.
+            If None, there will be no limit and infinite recursion may be
+            possible.
+        """
+        path = self.expand_path(path, recursive=recursive, maxdepth=maxdepth)
+        for p in reversed(path):
+            self.rm_file(p)
+
+    @classmethod
+    def _parent(cls, path):
+        path = cls._strip_protocol(path)
+        if "/" in path:
+            parent = path.rsplit("/", 1)[0].lstrip(cls.root_marker)
+            return cls.root_marker + parent
+        else:
+            return cls.root_marker
+
+    def _open(
+        self,
+        path,
+        mode="rb",
+        block_size=None,
+        autocommit=True,
+        cache_options=None,
+        **kwargs,
+    ):
+        """Return raw bytes-mode file-like from the file-system"""
+        return AbstractBufferedFile(
+            self,
+            path,
+            mode,
+            block_size,
+            autocommit,
+            cache_options=cache_options,
+            **kwargs,
+        )
+
+    def open(
+        self,
+        path,
+        mode="rb",
+        block_size=None,
+        cache_options=None,
+        compression=None,
+        **kwargs,
+    ):
+        """
+        Return a file-like object from the filesystem
+
+        The resultant instance must function correctly in a context ``with``
+        block.
+
+        Parameters
+        ----------
+        path: str
+            Target file
+        mode: str like 'rb', 'w'
+            See builtin ``open()``
+            Mode "x" (exclusive write) may be implemented by the backend. Even if
+            it is, whether it is checked up front or on commit, and whether it is
+            atomic is implementation-dependent.
+ block_size: int + Some indication of buffering - this is a value in bytes + cache_options : dict, optional + Extra arguments to pass through to the cache. + compression: string or None + If given, open file using compression codec. Can either be a compression + name (a key in ``fsspec.compression.compr``) or "infer" to guess the + compression from the filename suffix. + encoding, errors, newline: passed on to TextIOWrapper for text mode + """ + import io + + path = self._strip_protocol(path) + if "b" not in mode: + mode = mode.replace("t", "") + "b" + + text_kwargs = { + k: kwargs.pop(k) + for k in ["encoding", "errors", "newline"] + if k in kwargs + } + return io.TextIOWrapper( + self.open( + path, + mode, + block_size=block_size, + cache_options=cache_options, + compression=compression, + **kwargs, + ), + **text_kwargs, + ) + else: + ac = kwargs.pop("autocommit", not self._intrans) + f = self._open( + path, + mode=mode, + block_size=block_size, + autocommit=ac, + cache_options=cache_options, + **kwargs, + ) + if compression is not None: + from fsspec.compression import compr + from fsspec.core import get_compression + + compression = get_compression(path, compression) + compress = compr[compression] + f = compress(f, mode=mode[0]) + + if not ac and "r" not in mode: + self.transaction.files.append(f) + return f + + def touch(self, path, truncate=True, **kwargs): + """Create empty file, or update timestamp + + Parameters + ---------- + path: str + file location + truncate: bool + If True, always set file size to 0; if False, update timestamp and + leave file unchanged, if backend allows this + """ + if truncate or not self.exists(path): + with self.open(path, "wb", **kwargs): + pass + else: + raise NotImplementedError # update timestamp, if possible + + def ukey(self, path): + """Hash of file properties, to tell if it has changed""" + return sha256(str(self.info(path)).encode()).hexdigest() + + def read_block(self, fn, offset, length, delimiter=None): + """Read a block of bytes from + + Starting at ``offset`` of the file, read ``length`` bytes. If + ``delimiter`` is set then we ensure that the read starts and stops at + delimiter boundaries that follow the locations ``offset`` and ``offset + + length``. If ``offset`` is zero then we start at zero. The + bytestring returned WILL include the end delimiter string. + + If offset+length is beyond the eof, reads to eof. + + Parameters + ---------- + fn: string + Path to filename + offset: int + Byte offset to start read + length: int + Number of bytes to read. If None, read to end. + delimiter: bytes (optional) + Ensure reading starts and stops at delimiter bytestring + + Examples + -------- + >>> fs.read_block('data/file.csv', 0, 13) # doctest: +SKIP + b'Alice, 100\\nBo' + >>> fs.read_block('data/file.csv', 0, 13, delimiter=b'\\n') # doctest: +SKIP + b'Alice, 100\\nBob, 200\\n' + + Use ``length=None`` to read to the end of the file. + >>> fs.read_block('data/file.csv', 0, None, delimiter=b'\\n') # doctest: +SKIP + b'Alice, 100\\nBob, 200\\nCharlie, 300' + + See Also + -------- + :func:`fsspec.utils.read_block` + """ + with self.open(fn, "rb") as f: + size = f.size + if length is None: + length = size + if size is not None and offset + length > size: + length = size - offset + return read_block(f, offset, length, delimiter) + + def to_json(self, *, include_password: bool = True) -> str: + """ + JSON representation of this filesystem instance. 
+ + Parameters + ---------- + include_password: bool, default True + Whether to include the password (if any) in the output. + + Returns + ------- + JSON string with keys ``cls`` (the python location of this class), + protocol (text name of this class's protocol, first one in case of + multiple), ``args`` (positional args, usually empty), and all other + keyword arguments as their own keys. + + Warnings + -------- + Serialized filesystems may contain sensitive information which have been + passed to the constructor, such as passwords and tokens. Make sure you + store and send them in a secure environment! + """ + from .json import FilesystemJSONEncoder + + return json.dumps( + self, + cls=type( + "_FilesystemJSONEncoder", + (FilesystemJSONEncoder,), + {"include_password": include_password}, + ), + ) + + @staticmethod + def from_json(blob: str) -> AbstractFileSystem: + """ + Recreate a filesystem instance from JSON representation. + + See ``.to_json()`` for the expected structure of the input. + + Parameters + ---------- + blob: str + + Returns + ------- + file system instance, not necessarily of this particular class. + + Warnings + -------- + This can import arbitrary modules (as determined by the ``cls`` key). + Make sure you haven't installed any modules that may execute malicious code + at import time. + """ + from .json import FilesystemJSONDecoder + + return json.loads(blob, cls=FilesystemJSONDecoder) + + def to_dict(self, *, include_password: bool = True) -> dict[str, Any]: + """ + JSON-serializable dictionary representation of this filesystem instance. + + Parameters + ---------- + include_password: bool, default True + Whether to include the password (if any) in the output. + + Returns + ------- + Dictionary with keys ``cls`` (the python location of this class), + protocol (text name of this class's protocol, first one in case of + multiple), ``args`` (positional args, usually empty), and all other + keyword arguments as their own keys. + + Warnings + -------- + Serialized filesystems may contain sensitive information which have been + passed to the constructor, such as passwords and tokens. Make sure you + store and send them in a secure environment! + """ + from .json import FilesystemJSONEncoder + + json_encoder = FilesystemJSONEncoder() + + cls = type(self) + proto = self.protocol + + storage_options = dict(self.storage_options) + if not include_password: + storage_options.pop("password", None) + + return dict( + cls=f"{cls.__module__}:{cls.__name__}", + protocol=proto[0] if isinstance(proto, (tuple, list)) else proto, + args=json_encoder.make_serializable(self.storage_args), + **json_encoder.make_serializable(storage_options), + ) + + @staticmethod + def from_dict(dct: dict[str, Any]) -> AbstractFileSystem: + """ + Recreate a filesystem instance from dictionary representation. + + See ``.to_dict()`` for the expected structure of the input. + + Parameters + ---------- + dct: Dict[str, Any] + + Returns + ------- + file system instance, not necessarily of this particular class. + + Warnings + -------- + This can import arbitrary modules (as determined by the ``cls`` key). + Make sure you haven't installed any modules that may execute malicious code + at import time. 
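+
+        Examples
+        --------
+        A minimal round trip, as a sketch (the ``memory`` protocol is used
+        purely for illustration):
+
+        >>> import fsspec  # doctest: +SKIP
+        >>> fs = fsspec.filesystem("memory")  # doctest: +SKIP
+        >>> fs2 = AbstractFileSystem.from_dict(fs.to_dict())  # doctest: +SKIP
+        >>> type(fs2) is type(fs)  # doctest: +SKIP
+        True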
+ """ + from .json import FilesystemJSONDecoder + + json_decoder = FilesystemJSONDecoder() + + dct = dict(dct) # Defensive copy + + cls = FilesystemJSONDecoder.try_resolve_fs_cls(dct) + if cls is None: + raise ValueError("Not a serialized AbstractFileSystem") + + dct.pop("cls", None) + dct.pop("protocol", None) + + return cls( + *json_decoder.unmake_serializable(dct.pop("args", ())), + **json_decoder.unmake_serializable(dct), + ) + + def _get_pyarrow_filesystem(self): + """ + Make a version of the FS instance which will be acceptable to pyarrow + """ + # all instances already also derive from pyarrow + return self + + def get_mapper(self, root="", check=False, create=False, missing_exceptions=None): + """Create key/value store based on this file-system + + Makes a MutableMapping interface to the FS at the given root path. + See ``fsspec.mapping.FSMap`` for further details. + """ + from .mapping import FSMap + + return FSMap( + root, + self, + check=check, + create=create, + missing_exceptions=missing_exceptions, + ) + + @classmethod + def clear_instance_cache(cls): + """ + Clear the cache of filesystem instances. + + Notes + ----- + Unless overridden by setting the ``cachable`` class attribute to False, + the filesystem class stores a reference to newly created instances. This + prevents Python's normal rules around garbage collection from working, + since the instances refcount will not drop to zero until + ``clear_instance_cache`` is called. + """ + cls._cache.clear() + + def created(self, path): + """Return the created timestamp of a file as a datetime.datetime""" + raise NotImplementedError + + def modified(self, path): + """Return the modified timestamp of a file as a datetime.datetime""" + raise NotImplementedError + + def tree( + self, + path: str = "/", + recursion_limit: int = 2, + max_display: int = 25, + display_size: bool = False, + prefix: str = "", + is_last: bool = True, + first: bool = True, + indent_size: int = 4, + ) -> str: + """ + Return a tree-like structure of the filesystem starting from the given path as a string. + + Parameters + ---------- + path: Root path to start traversal from + recursion_limit: Maximum depth of directory traversal + max_display: Maximum number of items to display per directory + display_size: Whether to display file sizes + prefix: Current line prefix for visual tree structure + is_last: Whether current item is last in its level + first: Whether this is the first call (displays root path) + indent_size: Number of spaces by indent + + Returns + ------- + str: A string representing the tree structure. 
+ + Example + ------- + >>> from fsspec import filesystem + + >>> fs = filesystem('ftp', host='test.rebex.net', user='demo', password='password') + >>> tree = fs.tree(display_size=True, recursion_limit=3, indent_size=8, max_display=10) + >>> print(tree) + """ + + def format_bytes(n: int) -> str: + """Format bytes as text.""" + for prefix, k in ( + ("P", 2**50), + ("T", 2**40), + ("G", 2**30), + ("M", 2**20), + ("k", 2**10), + ): + if n >= 0.9 * k: + return f"{n / k:.2f} {prefix}b" + return f"{n}B" + + result = [] + + if first: + result.append(path) + + if recursion_limit: + indent = " " * indent_size + contents = self.ls(path, detail=True) + contents.sort( + key=lambda x: (x.get("type") != "directory", x.get("name", "")) + ) + + if max_display is not None and len(contents) > max_display: + displayed_contents = contents[:max_display] + remaining_count = len(contents) - max_display + else: + displayed_contents = contents + remaining_count = 0 + + for i, item in enumerate(displayed_contents): + is_last_item = (i == len(displayed_contents) - 1) and ( + remaining_count == 0 + ) + + branch = ( + "└" + ("─" * (indent_size - 2)) + if is_last_item + else "├" + ("─" * (indent_size - 2)) + ) + branch += " " + new_prefix = prefix + ( + indent if is_last_item else "│" + " " * (indent_size - 1) + ) + + name = os.path.basename(item.get("name", "")) + + if display_size and item.get("type") == "directory": + sub_contents = self.ls(item.get("name", ""), detail=True) + num_files = sum( + 1 for sub_item in sub_contents if sub_item.get("type") == "file" + ) + num_folders = sum( + 1 + for sub_item in sub_contents + if sub_item.get("type") == "directory" + ) + + if num_files == 0 and num_folders == 0: + size = " (empty folder)" + elif num_files == 0: + size = f" ({num_folders} subfolder{'s' if num_folders > 1 else ''})" + elif num_folders == 0: + size = f" ({num_files} file{'s' if num_files > 1 else ''})" + else: + size = f" ({num_files} file{'s' if num_files > 1 else ''}, {num_folders} subfolder{'s' if num_folders > 1 else ''})" + elif display_size and item.get("type") == "file": + size = f" ({format_bytes(item.get('size', 0))})" + else: + size = "" + + result.append(f"{prefix}{branch}{name}{size}") + + if item.get("type") == "directory" and recursion_limit > 0: + result.append( + self.tree( + path=item.get("name", ""), + recursion_limit=recursion_limit - 1, + max_display=max_display, + display_size=display_size, + prefix=new_prefix, + is_last=is_last_item, + first=False, + indent_size=indent_size, + ) + ) + + if remaining_count > 0: + more_message = f"{remaining_count} more item(s) not displayed." 
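+                # Rendered with the closing "└──" glyph so the truncated
+                # listing still reads as a finished tree, e.g.
+                # "└── 3 more item(s) not displayed."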
+ result.append( + f"{prefix}{'└' + ('─' * (indent_size - 2))} {more_message}" + ) + + return "\n".join(_ for _ in result if _) + + # ------------------------------------------------------------------------ + # Aliases + + def read_bytes(self, path, start=None, end=None, **kwargs): + """Alias of `AbstractFileSystem.cat_file`.""" + return self.cat_file(path, start=start, end=end, **kwargs) + + def write_bytes(self, path, value, **kwargs): + """Alias of `AbstractFileSystem.pipe_file`.""" + self.pipe_file(path, value, **kwargs) + + def makedir(self, path, create_parents=True, **kwargs): + """Alias of `AbstractFileSystem.mkdir`.""" + return self.mkdir(path, create_parents=create_parents, **kwargs) + + def mkdirs(self, path, exist_ok=False): + """Alias of `AbstractFileSystem.makedirs`.""" + return self.makedirs(path, exist_ok=exist_ok) + + def listdir(self, path, detail=True, **kwargs): + """Alias of `AbstractFileSystem.ls`.""" + return self.ls(path, detail=detail, **kwargs) + + def cp(self, path1, path2, **kwargs): + """Alias of `AbstractFileSystem.copy`.""" + return self.copy(path1, path2, **kwargs) + + def move(self, path1, path2, **kwargs): + """Alias of `AbstractFileSystem.mv`.""" + return self.mv(path1, path2, **kwargs) + + def stat(self, path, **kwargs): + """Alias of `AbstractFileSystem.info`.""" + return self.info(path, **kwargs) + + def disk_usage(self, path, total=True, maxdepth=None, **kwargs): + """Alias of `AbstractFileSystem.du`.""" + return self.du(path, total=total, maxdepth=maxdepth, **kwargs) + + def rename(self, path1, path2, **kwargs): + """Alias of `AbstractFileSystem.mv`.""" + return self.mv(path1, path2, **kwargs) + + def delete(self, path, recursive=False, maxdepth=None): + """Alias of `AbstractFileSystem.rm`.""" + return self.rm(path, recursive=recursive, maxdepth=maxdepth) + + def upload(self, lpath, rpath, recursive=False, **kwargs): + """Alias of `AbstractFileSystem.put`.""" + return self.put(lpath, rpath, recursive=recursive, **kwargs) + + def download(self, rpath, lpath, recursive=False, **kwargs): + """Alias of `AbstractFileSystem.get`.""" + return self.get(rpath, lpath, recursive=recursive, **kwargs) + + def sign(self, path, expiration=100, **kwargs): + """Create a signed URL representing the given path + + Some implementations allow temporary URLs to be generated, as a + way of delegating credentials. + + Parameters + ---------- + path : str + The path on the filesystem + expiration : int + Number of seconds to enable the URL for (if supported) + + Returns + ------- + URL : str + The signed URL + + Raises + ------ + NotImplementedError : if method is not implemented for a filesystem + """ + raise NotImplementedError("Sign is not implemented for this filesystem") + + def _isfilestore(self): + # Originally inherited from pyarrow DaskFileSystem. Keeping this + # here for backwards compatibility as long as pyarrow uses its + # legacy fsspec-compatible filesystems and thus accepts fsspec + # filesystems as well + return False + + +class AbstractBufferedFile(io.IOBase): + """Convenient class to derive from to provide buffering + + In the case that the backend does not provide a pythonic file-like object + already, this class contains much of the logic to build one. The only + methods that need to be overridden are ``_upload_chunk``, + ``_initiate_upload`` and ``_fetch_range``. 
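+
+    A minimal in-memory subclass sketch (``MemFile`` and the ``_blob_store``
+    attribute are hypothetical, for illustration only)::
+
+        class MemFile(AbstractBufferedFile):
+            def _fetch_range(self, start, end):
+                # read path: return the requested byte range from the backend
+                return self.fs._blob_store[self.path][start:end]
+
+            def _initiate_upload(self):
+                # write path: called once, before the first chunk is uploaded
+                self._parts = []
+
+            def _upload_chunk(self, final=False):
+                # ``self.buffer`` holds the bytes accumulated since last flush
+                self._parts.append(self.buffer.getvalue())
+                if final:
+                    self.fs._blob_store[self.path] = b"".join(self._parts)
+                return True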
+ """ + + DEFAULT_BLOCK_SIZE = 5 * 2**20 + _details = None + + def __init__( + self, + fs, + path, + mode="rb", + block_size="default", + autocommit=True, + cache_type="readahead", + cache_options=None, + size=None, + **kwargs, + ): + """ + Template for files with buffered reading and writing + + Parameters + ---------- + fs: instance of FileSystem + path: str + location in file-system + mode: str + Normal file modes. Currently only 'wb', 'ab' or 'rb'. Some file + systems may be read-only, and some may not support append. + block_size: int + Buffer size for reading or writing, 'default' for class default + autocommit: bool + Whether to write to final destination; may only impact what + happens when file is being closed. + cache_type: {"readahead", "none", "mmap", "bytes"}, default "readahead" + Caching policy in read mode. See the definitions in ``core``. + cache_options : dict + Additional options passed to the constructor for the cache specified + by `cache_type`. + size: int + If given and in read mode, suppressed having to look up the file size + kwargs: + Gets stored as self.kwargs + """ + from .core import caches + + self.path = path + self.fs = fs + self.mode = mode + self.blocksize = ( + self.DEFAULT_BLOCK_SIZE if block_size in ["default", None] else block_size + ) + self.loc = 0 + self.autocommit = autocommit + self.end = None + self.start = None + self.closed = False + + if cache_options is None: + cache_options = {} + + if "trim" in kwargs: + warnings.warn( + "Passing 'trim' to control the cache behavior has been deprecated. " + "Specify it within the 'cache_options' argument instead.", + FutureWarning, + ) + cache_options["trim"] = kwargs.pop("trim") + + self.kwargs = kwargs + + if mode not in {"ab", "rb", "wb", "xb"}: + raise NotImplementedError("File mode not supported") + if mode == "rb": + if size is not None: + self.size = size + else: + self.size = self.details["size"] + self.cache = caches[cache_type]( + self.blocksize, self._fetch_range, self.size, **cache_options + ) + else: + self.buffer = io.BytesIO() + self.offset = None + self.forced = False + self.location = None + + @property + def details(self): + if self._details is None: + self._details = self.fs.info(self.path) + return self._details + + @details.setter + def details(self, value): + self._details = value + self.size = value["size"] + + @property + def full_name(self): + return _unstrip_protocol(self.path, self.fs) + + @property + def closed(self): + # get around this attr being read-only in IOBase + # use getattr here, since this can be called during del + return getattr(self, "_closed", True) + + @closed.setter + def closed(self, c): + self._closed = c + + def __hash__(self): + if "w" in self.mode: + return id(self) + else: + return int(tokenize(self.details), 16) + + def __eq__(self, other): + """Files are equal if they have the same checksum, only in read mode""" + if self is other: + return True + return ( + isinstance(other, type(self)) + and self.mode == "rb" + and other.mode == "rb" + and hash(self) == hash(other) + ) + + def commit(self): + """Move from temp to final destination""" + + def discard(self): + """Throw away temporary file""" + + def info(self): + """File information about this path""" + if self.readable(): + return self.details + else: + raise ValueError("Info not available while writing") + + def tell(self): + """Current file location""" + return self.loc + + def seek(self, loc, whence=0): + """Set current file location + + Parameters + ---------- + loc: int + byte location + whence: {0, 
1, 2} + from start of file, current location or end of file, resp. + """ + loc = int(loc) + if not self.mode == "rb": + raise OSError(ESPIPE, "Seek only available in read mode") + if whence == 0: + nloc = loc + elif whence == 1: + nloc = self.loc + loc + elif whence == 2: + nloc = self.size + loc + else: + raise ValueError(f"invalid whence ({whence}, should be 0, 1 or 2)") + if nloc < 0: + raise ValueError("Seek before start of file") + self.loc = nloc + return self.loc + + def write(self, data): + """ + Write data to buffer. + + Buffer only sent on flush() or if buffer is greater than + or equal to blocksize. + + Parameters + ---------- + data: bytes + Set of bytes to be written. + """ + if not self.writable(): + raise ValueError("File not in write mode") + if self.closed: + raise ValueError("I/O operation on closed file.") + if self.forced: + raise ValueError("This file has been force-flushed, can only close") + out = self.buffer.write(data) + self.loc += out + if self.buffer.tell() >= self.blocksize: + self.flush() + return out + + def flush(self, force=False): + """ + Write buffered data to backend store. + + Writes the current buffer, if it is larger than the block-size, or if + the file is being closed. + + Parameters + ---------- + force: bool + When closing, write the last block even if it is smaller than + blocks are allowed to be. Disallows further writing to this file. + """ + + if self.closed: + raise ValueError("Flush on closed file") + if force and self.forced: + raise ValueError("Force flush cannot be called more than once") + if force: + self.forced = True + + if self.readable(): + # no-op to flush on read-mode + return + + if not force and self.buffer.tell() < self.blocksize: + # Defer write on small block + return + + if self.offset is None: + # Initialize a multipart upload + self.offset = 0 + try: + self._initiate_upload() + except: + self.closed = True + raise + + if self._upload_chunk(final=force) is not False: + self.offset += self.buffer.seek(0, 2) + self.buffer = io.BytesIO() + + def _upload_chunk(self, final=False): + """Write one part of a multi-block file upload + + Parameters + ========== + final: bool + This is the last block, so should complete file, if + self.autocommit is True. + """ + # may not yet have been initialized, may need to call _initialize_upload + + def _initiate_upload(self): + """Create remote file/upload""" + pass + + def _fetch_range(self, start, end): + """Get the specified set of bytes from remote""" + return self.fs.cat_file(self.path, start=start, end=end) + + def read(self, length=-1): + """ + Return data from cache, or fetch pieces as necessary + + Parameters + ---------- + length: int (-1) + Number of bytes to read; if <0, all remaining bytes. 
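+
+        Examples
+        --------
+        Sketch only; ``fs`` stands for any filesystem instance and the path
+        is hypothetical:
+
+        >>> with fs.open("data.bin", "rb") as f:  # doctest: +SKIP
+        ...     header = f.read(16)  # first 16 bytes
+        ...     rest = f.read()  # all remaining bytes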
+ """ + length = -1 if length is None else int(length) + if self.mode != "rb": + raise ValueError("File not in read mode") + if length < 0: + length = self.size - self.loc + if self.closed: + raise ValueError("I/O operation on closed file.") + if length == 0: + # don't even bother calling fetch + return b"" + out = self.cache._fetch(self.loc, self.loc + length) + + logger.debug( + "%s read: %i - %i %s", + self, + self.loc, + self.loc + length, + self.cache._log_stats(), + ) + self.loc += len(out) + return out + + def readinto(self, b): + """mirrors builtin file's readinto method + + https://docs.python.org/3/library/io.html#io.RawIOBase.readinto + """ + out = memoryview(b).cast("B") + data = self.read(out.nbytes) + out[: len(data)] = data + return len(data) + + def readuntil(self, char=b"\n", blocks=None): + """Return data between current position and first occurrence of char + + char is included in the output, except if the end of the tile is + encountered first. + + Parameters + ---------- + char: bytes + Thing to find + blocks: None or int + How much to read in each go. Defaults to file blocksize - which may + mean a new read on every call. + """ + out = [] + while True: + start = self.tell() + part = self.read(blocks or self.blocksize) + if len(part) == 0: + break + found = part.find(char) + if found > -1: + out.append(part[: found + len(char)]) + self.seek(start + found + len(char)) + break + out.append(part) + return b"".join(out) + + def readline(self): + """Read until and including the first occurrence of newline character + + Note that, because of character encoding, this is not necessarily a + true line ending. + """ + return self.readuntil(b"\n") + + def __next__(self): + out = self.readline() + if out: + return out + raise StopIteration + + def __iter__(self): + return self + + def readlines(self): + """Return all data, split by the newline character, including the newline character""" + data = self.read() + lines = data.split(b"\n") + out = [l + b"\n" for l in lines[:-1]] + if data.endswith(b"\n"): + return out + else: + return out + [lines[-1]] + # return list(self) ??? 
+
+    def readinto1(self, b):
+        return self.readinto(b)
+
+    def close(self):
+        """Close file
+
+        Finalizes writes, discards cache
+        """
+        if getattr(self, "_unclosable", False):
+            return
+        if self.closed:
+            return
+        try:
+            if self.mode == "rb":
+                self.cache = None
+            else:
+                if not self.forced:
+                    self.flush(force=True)
+
+                if self.fs is not None:
+                    self.fs.invalidate_cache(self.path)
+                    self.fs.invalidate_cache(self.fs._parent(self.path))
+        finally:
+            self.closed = True
+
+    def readable(self):
+        """Whether opened for reading"""
+        return "r" in self.mode and not self.closed
+
+    def seekable(self):
+        """Whether is seekable (only in read mode)"""
+        return self.readable()
+
+    def writable(self):
+        """Whether opened for writing"""
+        return self.mode in {"wb", "ab", "xb"} and not self.closed
+
+    def __reduce__(self):
+        if self.mode != "rb":
+            raise RuntimeError("Pickling a writeable file is not supported")
+
+        return reopen, (
+            self.fs,
+            self.path,
+            self.mode,
+            self.blocksize,
+            self.loc,
+            self.size,
+            self.autocommit,
+            self.cache.name if self.cache else "none",
+            self.kwargs,
+        )
+
+    def __del__(self):
+        if not self.closed:
+            self.close()
+
+    def __str__(self):
+        return f"<File-like object {type(self.fs).__name__}, {self.path}>"
+
+    __repr__ = __str__
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args):
+        self.close()
+
+
+def reopen(fs, path, mode, blocksize, loc, size, autocommit, cache_type, kwargs):
+    file = fs.open(
+        path,
+        mode=mode,
+        block_size=blocksize,
+        autocommit=autocommit,
+        cache_type=cache_type,
+        size=size,
+        **kwargs,
+    )
+    if loc > 0:
+        file.seek(loc)
+    return file
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ed2ad802ecaf021106c25c03112f29e75c7b2f8
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py
@@ -0,0 +1,289 @@
+import os
+from hashlib import md5
+
+import pytest
+
+from fsspec.implementations.local import LocalFileSystem
+from fsspec.tests.abstract.copy import AbstractCopyTests  # noqa: F401
+from fsspec.tests.abstract.get import AbstractGetTests  # noqa: F401
+from fsspec.tests.abstract.open import AbstractOpenTests  # noqa: F401
+from fsspec.tests.abstract.pipe import AbstractPipeTests  # noqa: F401
+from fsspec.tests.abstract.put import AbstractPutTests  # noqa: F401
+
+
+class BaseAbstractFixtures:
+    """
+    Abstract base class containing fixtures that are used by, but never need
+    to be overridden in, derived filesystem-specific classes to run the
+    abstract tests on such filesystems.
+    """
+
+    @pytest.fixture
+    def fs_bulk_operations_scenario_0(self, fs, fs_join, fs_path):
+        """
+        Scenario on remote filesystem that is used for many cp/get/put tests.
+
+        Cleans up at the end of each test in which it is used.
+        """
+        source = self._bulk_operations_scenario_0(fs, fs_join, fs_path)
+        yield source
+        fs.rm(source, recursive=True)
+
+    @pytest.fixture
+    def fs_glob_edge_cases_files(self, fs, fs_join, fs_path):
+        """
+        Scenario on remote filesystem that is used for glob edge cases cp/get/put tests.
+
+        Cleans up at the end of each test in which it is used.
+ """ + source = self._glob_edge_cases_files(fs, fs_join, fs_path) + yield source + fs.rm(source, recursive=True) + + @pytest.fixture + def fs_dir_and_file_with_same_name_prefix(self, fs, fs_join, fs_path): + """ + Scenario on remote filesystem that is used to check cp/get/put on directory + and file with the same name prefixes. + + Cleans up at the end of each test it which it is used. + """ + source = self._dir_and_file_with_same_name_prefix(fs, fs_join, fs_path) + yield source + fs.rm(source, recursive=True) + + @pytest.fixture + def fs_10_files_with_hashed_names(self, fs, fs_join, fs_path): + """ + Scenario on remote filesystem that is used to check cp/get/put files order + when source and destination are lists. + + Cleans up at the end of each test it which it is used. + """ + source = self._10_files_with_hashed_names(fs, fs_join, fs_path) + yield source + fs.rm(source, recursive=True) + + @pytest.fixture + def fs_target(self, fs, fs_join, fs_path): + """ + Return name of remote directory that does not yet exist to copy into. + + Cleans up at the end of each test it which it is used. + """ + target = fs_join(fs_path, "target") + yield target + if fs.exists(target): + fs.rm(target, recursive=True) + + @pytest.fixture + def local_bulk_operations_scenario_0(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used for many cp/get/put tests. + + Cleans up at the end of each test it which it is used. + """ + source = self._bulk_operations_scenario_0(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_glob_edge_cases_files(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used for glob edge cases cp/get/put tests. + + Cleans up at the end of each test it which it is used. + """ + source = self._glob_edge_cases_files(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_dir_and_file_with_same_name_prefix( + self, local_fs, local_join, local_path + ): + """ + Scenario on local filesystem that is used to check cp/get/put on directory + and file with the same name prefixes. + + Cleans up at the end of each test it which it is used. + """ + source = self._dir_and_file_with_same_name_prefix( + local_fs, local_join, local_path + ) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_10_files_with_hashed_names(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used to check cp/get/put files order + when source and destination are lists. + + Cleans up at the end of each test it which it is used. + """ + source = self._10_files_with_hashed_names(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_target(self, local_fs, local_join, local_path): + """ + Return name of local directory that does not yet exist to copy into. + + Cleans up at the end of each test it which it is used. + """ + target = local_join(local_path, "target") + yield target + if local_fs.exists(target): + local_fs.rm(target, recursive=True) + + def _glob_edge_cases_files(self, some_fs, some_join, some_path): + """ + Scenario that is used for glob edge cases cp/get/put tests. 
+ Creates the following directory and file structure: + + 📁 source + ├── 📄 file1 + ├── 📄 file2 + ├── 📁 subdir0 + │ ├── 📄 subfile1 + │ ├── 📄 subfile2 + │ └── 📁 nesteddir + │ └── 📄 nestedfile + └── 📁 subdir1 + ├── 📄 subfile1 + ├── 📄 subfile2 + └── 📁 nesteddir + └── 📄 nestedfile + """ + source = some_join(some_path, "source") + some_fs.touch(some_join(source, "file1")) + some_fs.touch(some_join(source, "file2")) + + for subdir_idx in range(2): + subdir = some_join(source, f"subdir{subdir_idx}") + nesteddir = some_join(subdir, "nesteddir") + some_fs.makedirs(nesteddir) + some_fs.touch(some_join(subdir, "subfile1")) + some_fs.touch(some_join(subdir, "subfile2")) + some_fs.touch(some_join(nesteddir, "nestedfile")) + + return source + + def _bulk_operations_scenario_0(self, some_fs, some_join, some_path): + """ + Scenario that is used for many cp/get/put tests. Creates the following + directory and file structure: + + 📁 source + ├── 📄 file1 + ├── 📄 file2 + └── 📁 subdir + ├── 📄 subfile1 + ├── 📄 subfile2 + └── 📁 nesteddir + └── 📄 nestedfile + """ + source = some_join(some_path, "source") + subdir = some_join(source, "subdir") + nesteddir = some_join(subdir, "nesteddir") + some_fs.makedirs(nesteddir) + some_fs.touch(some_join(source, "file1")) + some_fs.touch(some_join(source, "file2")) + some_fs.touch(some_join(subdir, "subfile1")) + some_fs.touch(some_join(subdir, "subfile2")) + some_fs.touch(some_join(nesteddir, "nestedfile")) + return source + + def _dir_and_file_with_same_name_prefix(self, some_fs, some_join, some_path): + """ + Scenario that is used to check cp/get/put on directory and file with + the same name prefixes. Creates the following directory and file structure: + + 📁 source + ├── 📄 subdir.txt + └── 📁 subdir + └── 📄 subfile.txt + """ + source = some_join(some_path, "source") + subdir = some_join(source, "subdir") + file = some_join(source, "subdir.txt") + subfile = some_join(subdir, "subfile.txt") + some_fs.makedirs(subdir) + some_fs.touch(file) + some_fs.touch(subfile) + return source + + def _10_files_with_hashed_names(self, some_fs, some_join, some_path): + """ + Scenario that is used to check cp/get/put files order when source and + destination are lists. Creates the following directory and file structure: + + 📁 source + └── 📄 {hashed([0-9])}.txt + """ + source = some_join(some_path, "source") + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + path = some_join(source, f"{hashed_i}.txt") + some_fs.pipe(path=path, value=f"{i}".encode()) + return source + + +class AbstractFixtures(BaseAbstractFixtures): + """ + Abstract base class containing fixtures that may be overridden in derived + filesystem-specific classes to run the abstract tests on such filesystems. + + For any particular filesystem some of these fixtures must be overridden, + such as ``fs`` and ``fs_path``, and others may be overridden if the + default functions here are not appropriate, such as ``fs_join``. + """ + + @pytest.fixture + def fs(self): + raise NotImplementedError("This function must be overridden in derived classes") + + @pytest.fixture + def fs_join(self): + """ + Return a function that joins its arguments together into a path. + + Most fsspec implementations join paths in a platform-dependent way, + but some will override this to always use a forward slash. 
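+
+        A pure forward-slash override could look like this sketch::
+
+            @pytest.fixture
+            def fs_join(self):
+                return lambda *args: "/".join(args)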
+ """ + return os.path.join + + @pytest.fixture + def fs_path(self): + raise NotImplementedError("This function must be overridden in derived classes") + + @pytest.fixture(scope="class") + def local_fs(self): + # Maybe need an option for auto_mkdir=False? This is only relevant + # for certain implementations. + return LocalFileSystem(auto_mkdir=True) + + @pytest.fixture + def local_join(self): + """ + Return a function that joins its arguments together into a path, on + the local filesystem. + """ + return os.path.join + + @pytest.fixture + def local_path(self, tmpdir): + return tmpdir + + @pytest.fixture + def supports_empty_directories(self): + """ + Return whether this implementation supports empty directories. + """ + return True + + @pytest.fixture + def fs_sanitize_path(self): + return lambda x: x diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/common.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/common.py new file mode 100644 index 0000000000000000000000000000000000000000..22e7c4140404ab2a8928689721419cf05c2760b9 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/common.py @@ -0,0 +1,175 @@ +GLOB_EDGE_CASES_TESTS = { + "argnames": ("path", "recursive", "maxdepth", "expected"), + "argvalues": [ + ("fil?1", False, None, ["file1"]), + ("fil?1", True, None, ["file1"]), + ("file[1-2]", False, None, ["file1", "file2"]), + ("file[1-2]", True, None, ["file1", "file2"]), + ("*", False, None, ["file1", "file2"]), + ( + "*", + True, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("*", True, 1, ["file1", "file2"]), + ( + "*", + True, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ("*1", False, None, ["file1"]), + ( + "*1", + True, + None, + [ + "file1", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("*1", True, 2, ["file1", "subdir1/subfile1", "subdir1/subfile2"]), + ( + "**", + False, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "**", + True, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("**", True, 1, ["file1", "file2"]), + ( + "**", + True, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "**", + False, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ("**/*1", False, None, ["file1", "subdir0/subfile1", "subdir1/subfile1"]), + ( + "**/*1", + True, + None, + [ + "file1", + "subdir0/subfile1", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("**/*1", True, 1, ["file1"]), + ( + "**/*1", + True, + 2, + ["file1", "subdir0/subfile1", "subdir1/subfile1", "subdir1/subfile2"], + ), + ("**/*1", False, 2, ["file1", "subdir0/subfile1", 
"subdir1/subfile1"]), + ("**/subdir0", False, None, []), + ("**/subdir0", True, None, ["subfile1", "subfile2", "nesteddir/nestedfile"]), + ("**/subdir0/nested*", False, 2, []), + ("**/subdir0/nested*", True, 2, ["nestedfile"]), + ("subdir[1-2]", False, None, []), + ("subdir[1-2]", True, None, ["subfile1", "subfile2", "nesteddir/nestedfile"]), + ("subdir[1-2]", True, 2, ["subfile1", "subfile2"]), + ("subdir[0-1]", False, None, []), + ( + "subdir[0-1]", + True, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "subdir[0-1]/*fil[e]*", + False, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ( + "subdir[0-1]/*fil[e]*", + True, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ], +} diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py new file mode 100644 index 0000000000000000000000000000000000000000..e39e57e5f7d52bfda8ab5e2398b04cc2303630a0 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py @@ -0,0 +1,557 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractCopyTests: + def test_copy_file_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1a + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + target_file2 = fs_join(target, "file2") + target_subfile1 = fs_join(target, "subfile1") + + # Copy from source directory + fs.cp(fs_join(source, "file2"), target) + assert fs.isfile(target_file2) + + # Copy from sub directory + fs.cp(fs_join(source, "subdir", "subfile1"), target) + assert fs.isfile(target_subfile1) + + # Remove copied files + fs.rm([target_file2, target_subfile1]) + assert not fs.exists(target_file2) + assert not fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.cp(fs_join(source, "file2"), target + "/") + assert fs.isdir(target) + assert fs.isfile(target_file2) + + fs.cp(fs_join(source, "subdir", "subfile1"), target + "/") + assert fs.isfile(target_subfile1) + + def test_copy_file_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1b + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.cp( + fs_join(source, "subdir", "subfile1"), fs_join(target, "newdir/") + ) # Note trailing slash + assert fs.isdir(target) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_copy_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1c + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + 
fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + fs.cp(fs_join(source, "subdir", "subfile1"), fs_join(target, "newfile")) + assert fs.isfile(fs_join(target, "newfile")) + + def test_copy_file_to_file_in_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1d + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.cp( + fs_join(source, "subdir", "subfile1"), fs_join(target, "newdir", "newfile") + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "newfile")) + + def test_copy_directory_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1e + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.cp(s, t) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + fs.cp(s, t, recursive=True) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert fs.isdir(fs_join(target, "subdir", "nesteddir")) + assert fs.isfile(fs_join(target, "subdir", "nesteddir", "nestedfile")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.cp(s, t, recursive=True, maxdepth=1) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert not fs.exists(fs_join(target, "subdir", "nesteddir")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_directory_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1f + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.cp(s, t) 
+ if supports_empty_directories: + assert fs.ls(target) == [] + else: + with pytest.raises(FileNotFoundError): + fs.ls(target) + + # With recursive + fs.cp(s, t, recursive=True) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.cp(s, t, recursive=True, maxdepth=1) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + def test_copy_glob_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1g + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.cp(fs_join(source, "subdir", "*"), t) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.isdir(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.cp(fs_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.cp( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_glob_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1h + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + 
if target_slash: + t += "/" + + # Without recursive + fs.cp(fs_join(source, "subdir", "*"), t) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.cp(fs_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.cp( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_copy_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_glob_edge_cases_files, + fs_target, + fs_sanitize_path, + ): + # Copy scenario 1g + source = fs_glob_edge_cases_files + + target = fs_target + + for new_dir, target_slash in product([True, False], [True, False]): + fs.mkdir(target) + + t = fs_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.copy(fs_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = fs.find(target) + if new_dir: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_copy_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 2a + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.cp(source_files, t) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, 
"file2")) + assert fs.isfile(fs_join(target, "subfile1")) + + fs.rm( + [ + fs_join(target, "file1"), + fs_join(target, "file2"), + fs_join(target, "subfile1"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_list_of_files_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 2b + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + fs.cp(source_files, fs_join(target, "newdir") + "/") # Note trailing slash + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "file1")) + assert fs.isfile(fs_join(target, "newdir", "file2")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_copy_two_files_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # This is a duplicate of test_copy_list_of_files_to_new_directory and + # can eventually be removed. + source = fs_bulk_operations_scenario_0 + + target = fs_target + assert not fs.exists(target) + fs.cp([fs_join(source, "file1"), fs_join(source, "file2")], target) + + assert fs.isdir(target) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + + def test_copy_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + fs_target, + fs_dir_and_file_with_same_name_prefix, + supports_empty_directories, + ): + # Create the test dirs + source = fs_dir_and_file_with_same_name_prefix + target = fs_target + + # Test without glob + fs.cp(fs_join(source, "subdir"), target, recursive=True) + + assert fs.isfile(fs_join(target, "subfile.txt")) + assert not fs.isfile(fs_join(target, "subdir.txt")) + + fs.rm([fs_join(target, "subfile.txt")]) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + assert not fs.exists(target) + + # Test with glob + fs.cp(fs_join(source, "subdir*"), target, recursive=True) + + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile.txt")) + assert fs.isfile(fs_join(target, "subdir.txt")) + + def test_copy_with_source_and_destination_as_list( + self, fs, fs_target, fs_join, fs_10_files_with_hashed_names + ): + # Create the test dir + source = fs_10_files_with_hashed_names + target = fs_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(fs_join(source, f"{hashed_i}.txt")) + destination_files.append(fs_join(target, f"{hashed_i}.txt")) + + # Copy and assert order was kept + fs.copy(path1=source_files, path2=destination_files) + + for i in range(10): + file_content = fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/get.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/get.py new file mode 100644 index 0000000000000000000000000000000000000000..851ab81ee581e74cac41c64c83ef0af75826d6b0 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/get.py @@ -0,0 +1,587 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from 
fsspec.implementations.local import make_path_posix +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractGetTests: + def test_get_file_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1a + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + assert local_fs.isdir(target) + + target_file2 = local_join(target, "file2") + target_subfile1 = local_join(target, "subfile1") + + # Copy from source directory + fs.get(fs_join(source, "file2"), target) + assert local_fs.isfile(target_file2) + + # Copy from sub directory + fs.get(fs_join(source, "subdir", "subfile1"), target) + assert local_fs.isfile(target_subfile1) + + # Remove copied files + local_fs.rm([target_file2, target_subfile1]) + assert not local_fs.exists(target_file2) + assert not local_fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.get(fs_join(source, "file2"), target + "/") + assert local_fs.isdir(target) + assert local_fs.isfile(target_file2) + + fs.get(fs_join(source, "subdir", "subfile1"), target + "/") + assert local_fs.isfile(target_subfile1) + + def test_get_file_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1b + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get( + fs_join(source, "subdir", "subfile1"), local_join(target, "newdir/") + ) # Note trailing slash + + assert local_fs.isdir(target) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + + def test_get_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1c + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get(fs_join(source, "subdir", "subfile1"), local_join(target, "newfile")) + assert local_fs.isfile(local_join(target, "newfile")) + + def test_get_file_to_file_in_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1d + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get( + fs_join(source, "subdir", "subfile1"), + local_join(target, "newdir", "newfile"), + ) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "newfile")) + + def test_get_directory_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1e + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + assert local_fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.get(s, t) + assert local_fs.ls(target) == [] + + # With recursive + fs.get(s, t, recursive=True) + if source_slash: + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert local_fs.isdir(local_join(target, "nesteddir")) + assert local_fs.isfile(local_join(target, "nesteddir", "nestedfile")) + assert not 
local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + local_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile1")) + assert local_fs.isfile(local_join(target, "subdir", "subfile2")) + assert local_fs.isdir(local_join(target, "subdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "subdir", "nesteddir", "nestedfile") + ) + + local_fs.rm(local_join(target, "subdir"), recursive=True) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get(s, t, recursive=True, maxdepth=1) + if source_slash: + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.exists(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile1")) + assert local_fs.isfile(local_join(target, "subdir", "subfile2")) + assert not local_fs.exists(local_join(target, "subdir", "nesteddir")) + + local_fs.rm(local_join(target, "subdir"), recursive=True) + assert local_fs.ls(target) == [] + + def test_get_directory_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1f + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = local_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.get(s, t) + assert local_fs.ls(target) == [] + + # With recursive + fs.get(s, t, recursive=True) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert local_fs.isdir(local_join(target, "newdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get(s, t, recursive=True, maxdepth=1) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + def test_get_glob_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1g + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.get(fs_join(source, "subdir", "*"), t) + assert local_fs.isfile(local_join(target, "subfile1")) + 
assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.isdir(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "nesteddir", "nestedfile")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.get(fs_join(source, "subdir", glob), t, recursive=recursive) + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert local_fs.isdir(local_join(target, "nesteddir")) + assert local_fs.isfile(local_join(target, "nesteddir", "nestedfile")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + local_join(target, "nesteddir"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.exists(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + def test_get_glob_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1h + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.get(fs_join(source, "subdir", "*"), t) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert local_fs.ls(target) == [] + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.get(fs_join(source, "subdir", glob), t, recursive=recursive) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert local_fs.isdir(local_join(target, "newdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.get( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert 
local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_fs.ls(target, detail=False), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_get_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_glob_edge_cases_files, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1g + source = fs_glob_edge_cases_files + + target = local_target + + for new_dir, target_slash in product([True, False], [True, False]): + local_fs.mkdir(target) + + t = local_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.get(fs_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = local_fs.find(target) + if new_dir: + prefixed_expected = [ + make_path_posix(local_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + make_path_posix(local_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + local_fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_get_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 2a + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.get(source_files, t) + assert local_fs.isfile(local_join(target, "file1")) + assert local_fs.isfile(local_join(target, "file2")) + assert local_fs.isfile(local_join(target, "subfile1")) + + local_fs.rm( + [ + local_join(target, "file1"), + local_join(target, "file2"), + local_join(target, "subfile1"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + def test_get_list_of_files_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 2b + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + fs.get(source_files, local_join(target, "newdir") + "/") # Note trailing slash + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "file1")) + assert local_fs.isfile(local_join(target, "newdir", "file2")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + + def test_get_directory_recursive( + self, fs, fs_join, fs_path, local_fs, local_join, local_target + ): + # https://github.com/fsspec/filesystem_spec/issues/1062 + # Recursive cp/get/put of source directory into non-existent target directory. 
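+        # The assertions below encode rsync-like semantics, so a short
+        # sketch may help (paths here are hypothetical, not fixtures):
+        # copying "src" into a missing target creates the target from
+        # src's contents; repeating the same call then nests "src" inside
+        # the now-existing target; a trailing slash on the source always
+        # copies contents only.
+        #
+        #   fs.get(src, "/tmp/tgt", recursive=True)        # -> /tmp/tgt/file
+        #   fs.get(src, "/tmp/tgt", recursive=True)        # -> /tmp/tgt/src/file
+        #   fs.get(src + "/", "/tmp/tgt", recursive=True)  # -> /tmp/tgt/file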
+ src = fs_join(fs_path, "src") + src_file = fs_join(src, "file") + fs.mkdir(src) + fs.touch(src_file) + + target = local_target + + # get without slash + assert not local_fs.exists(target) + for loop in range(2): + fs.get(src, target, recursive=True) + assert local_fs.isdir(target) + + if loop == 0: + assert local_fs.isfile(local_join(target, "file")) + assert not local_fs.exists(local_join(target, "src")) + else: + assert local_fs.isfile(local_join(target, "file")) + assert local_fs.isdir(local_join(target, "src")) + assert local_fs.isfile(local_join(target, "src", "file")) + + local_fs.rm(target, recursive=True) + + # get with slash + assert not local_fs.exists(target) + for loop in range(2): + fs.get(src + "/", target, recursive=True) + assert local_fs.isdir(target) + assert local_fs.isfile(local_join(target, "file")) + assert not local_fs.exists(local_join(target, "src")) + + def test_get_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + local_fs, + local_join, + local_target, + fs_dir_and_file_with_same_name_prefix, + ): + # Create the test dirs + source = fs_dir_and_file_with_same_name_prefix + target = local_target + + # Test without glob + fs.get(fs_join(source, "subdir"), target, recursive=True) + + assert local_fs.isfile(local_join(target, "subfile.txt")) + assert not local_fs.isfile(local_join(target, "subdir.txt")) + + local_fs.rm([local_join(target, "subfile.txt")]) + assert local_fs.ls(target) == [] + + # Test with glob + fs.get(fs_join(source, "subdir*"), target, recursive=True) + + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile.txt")) + assert local_fs.isfile(local_join(target, "subdir.txt")) + + def test_get_with_source_and_destination_as_list( + self, + fs, + fs_join, + local_fs, + local_join, + local_target, + fs_10_files_with_hashed_names, + ): + # Create the test dir + source = fs_10_files_with_hashed_names + target = local_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(fs_join(source, f"{hashed_i}.txt")) + destination_files.append( + make_path_posix(local_join(target, f"{hashed_i}.txt")) + ) + + # Copy and assert order was kept + fs.get(rpath=source_files, lpath=destination_files) + + for i in range(10): + file_content = local_fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py new file mode 100644 index 0000000000000000000000000000000000000000..39f6caa3de815e024fa84de2acecc986c823ed29 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py @@ -0,0 +1,57 @@ +import os + +import pytest + +import fsspec + + +def test_move_raises_error_with_tmpdir(tmpdir): + # Create a file in the temporary directory + source = tmpdir.join("source_file.txt") + source.write("content") + + # Define a destination that simulates a protected or invalid path + destination = tmpdir.join("non_existent_directory/destination_file.txt") + + # Instantiate the filesystem (assuming the local file system interface) + fs = fsspec.filesystem("file") + + # Use the actual file paths as string + with pytest.raises(FileNotFoundError): + 
fs.mv(str(source), str(destination)) + + +@pytest.mark.parametrize("recursive", (True, False)) +def test_move_raises_error_with_tmpdir_permission(recursive, tmpdir): + # Create a file in the temporary directory + source = tmpdir.join("source_file.txt") + source.write("content") + + # Create a protected directory (non-writable) + protected_dir = tmpdir.mkdir("protected_directory") + protected_path = str(protected_dir) + + # Set the directory to read-only + if os.name == "nt": + os.system(f'icacls "{protected_path}" /deny Everyone:(W)') + else: + os.chmod(protected_path, 0o555) # Sets the directory to read-only + + # Define a destination inside the protected directory + destination = protected_dir.join("destination_file.txt") + + # Instantiate the filesystem (assuming the local file system interface) + fs = fsspec.filesystem("file") + + # Try to move the file to the read-only directory, expecting a permission error + with pytest.raises(PermissionError): + fs.mv(str(source), str(destination), recursive=recursive) + + # Assert the file was not created in the destination + assert not os.path.exists(destination) + + # Cleanup: Restore permissions so the directory can be cleaned up + if os.name == "nt": + os.system(f'icacls "{protected_path}" /remove:d Everyone') + else: + os.chmod(protected_path, 0o755) # Restore write permission for cleanup diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/open.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/open.py new file mode 100644 index 0000000000000000000000000000000000000000..bb75ea852276fb8d834345883813b8e27a0ae24c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/open.py @@ -0,0 +1,11 @@ +import pytest + + +class AbstractOpenTests: + def test_open_exclusive(self, fs, fs_target): + with fs.open(fs_target, "wb") as f: + f.write(b"data") + with fs.open(fs_target, "rb") as f: + assert f.read() == b"data" + with pytest.raises(FileExistsError): + fs.open(fs_target, "xb") diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py new file mode 100644 index 0000000000000000000000000000000000000000..8ecca96e9d23ff268a253c48269d5cca451ea270 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py @@ -0,0 +1,11 @@ +import pytest + + +class AbstractPipeTests: + def test_pipe_exclusive(self, fs, fs_target): + fs.pipe_file(fs_target, b"data") + assert fs.cat_file(fs_target) == b"data" + with pytest.raises(FileExistsError): + fs.pipe_file(fs_target, b"data", mode="create") + fs.pipe_file(fs_target, b"new data", mode="overwrite") + assert fs.cat_file(fs_target) == b"new data" diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/put.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/put.py new file mode 100644 index 0000000000000000000000000000000000000000..9fc349977f0384d9fc86126498be5c6ad99a21d3 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/tests/abstract/put.py @@ -0,0 +1,591 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from 
fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractPutTests: + def test_put_file_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1a + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + target_file2 = fs_join(target, "file2") + target_subfile1 = fs_join(target, "subfile1") + + # Copy from source directory + fs.put(local_join(source, "file2"), target) + assert fs.isfile(target_file2) + + # Copy from sub directory + fs.put(local_join(source, "subdir", "subfile1"), target) + assert fs.isfile(target_subfile1) + + # Remove copied files + fs.rm([target_file2, target_subfile1]) + assert not fs.exists(target_file2) + assert not fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.put(local_join(source, "file2"), target + "/") + assert fs.isdir(target) + assert fs.isfile(target_file2) + + fs.put(local_join(source, "subdir", "subfile1"), target + "/") + assert fs.isfile(target_subfile1) + + def test_put_file_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1b + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.put( + local_join(source, "subdir", "subfile1"), fs_join(target, "newdir/") + ) # Note trailing slash + assert fs.isdir(target) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_put_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + supports_empty_directories, + local_bulk_operations_scenario_0, + ): + # Copy scenario 1c + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + fs.put(local_join(source, "subdir", "subfile1"), fs_join(target, "newfile")) + assert fs.isfile(fs_join(target, "newfile")) + + def test_put_file_to_file_in_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1d + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.put( + local_join(source, "subdir", "subfile1"), + fs_join(target, "newdir", "newfile"), + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "newfile")) + + def test_put_directory_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1e + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.put(s, t) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + fs.put(s, t, recursive=True) + if 
source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert fs.isdir(fs_join(target, "subdir", "nesteddir")) + assert fs.isfile(fs_join(target, "subdir", "nesteddir", "nestedfile")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.put(s, t, recursive=True, maxdepth=1) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert not fs.exists(fs_join(target, "subdir", "nesteddir")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_directory_to_new_directory( + self, + fs, + fs_join, + fs_target, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1f + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.put(s, t) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + with pytest.raises(FileNotFoundError): + fs.ls(target) + + # With recursive + fs.put(s, t, recursive=True) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.put(s, t, recursive=True, maxdepth=1) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + def test_put_glob_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + supports_empty_directories, + local_bulk_operations_scenario_0, + ): + # Copy scenario 1g + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target 
directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.put(local_join(source, "subdir", "*"), t) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.isdir(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.put(local_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.put( + local_join(source, "subdir", glob), + t, + recursive=recursive, + maxdepth=1, + ) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_glob_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1h + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.put(local_join(source, "subdir", "*"), t) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.put(local_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.put( + local_join(source, "subdir", glob), + 
t, + recursive=recursive, + maxdepth=1, + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_put_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_target, + local_glob_edge_cases_files, + local_join, + fs_sanitize_path, + ): + # Copy scenario 1g + source = local_glob_edge_cases_files + + target = fs_target + + for new_dir, target_slash in product([True, False], [True, False]): + fs.mkdir(target) + + t = fs_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.put(local_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = fs.find(target) + if new_dir: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_put_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 2a + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + source_files = [ + local_join(source, "file1"), + local_join(source, "file2"), + local_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.put(source_files, t) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + assert fs.isfile(fs_join(target, "subfile1")) + + fs.rm( + [ + fs_join(target, "file1"), + fs_join(target, "file2"), + fs_join(target, "subfile1"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_list_of_files_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 2b + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + source_files = [ + local_join(source, "file1"), + local_join(source, "file2"), + local_join(source, "subdir", "subfile1"), + ] + + fs.put(source_files, fs_join(target, "newdir") + "/") # Note trailing slash + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "file1")) + assert fs.isfile(fs_join(target, "newdir", "file2")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_put_directory_recursive( + self, fs, fs_join, fs_target, local_fs, local_join, local_path + ): + # https://github.com/fsspec/filesystem_spec/issues/1062 + # Recursive cp/get/put of source directory into non-existent target directory. 
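+        # Same rsync-like contract as the corresponding get() test, seen
+        # from the upload side (sketch only; paths are hypothetical, not
+        # fixtures): the first put() of "src" into a missing target creates
+        # the target from src's contents, a second identical put() nests
+        # "src" inside it, and "src/" always copies contents only.
+        #
+        #   fs.put("/tmp/src", tgt, recursive=True)        # -> tgt/file
+        #   fs.put("/tmp/src", tgt, recursive=True)        # -> tgt/src/file
+        #   fs.put("/tmp/src/", tgt, recursive=True)       # -> tgt/file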
+ src = local_join(local_path, "src") + src_file = local_join(src, "file") + local_fs.mkdir(src) + local_fs.touch(src_file) + + target = fs_target + + # put without slash + assert not fs.exists(target) + for loop in range(2): + fs.put(src, target, recursive=True) + assert fs.isdir(target) + + if loop == 0: + assert fs.isfile(fs_join(target, "file")) + assert not fs.exists(fs_join(target, "src")) + else: + assert fs.isfile(fs_join(target, "file")) + assert fs.isdir(fs_join(target, "src")) + assert fs.isfile(fs_join(target, "src", "file")) + + fs.rm(target, recursive=True) + + # put with slash + assert not fs.exists(target) + for loop in range(2): + fs.put(src + "/", target, recursive=True) + assert fs.isdir(target) + assert fs.isfile(fs_join(target, "file")) + assert not fs.exists(fs_join(target, "src")) + + def test_put_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + fs_target, + local_join, + local_dir_and_file_with_same_name_prefix, + supports_empty_directories, + ): + # Create the test dirs + source = local_dir_and_file_with_same_name_prefix + target = fs_target + + # Test without glob + fs.put(local_join(source, "subdir"), fs_target, recursive=True) + + assert fs.isfile(fs_join(fs_target, "subfile.txt")) + assert not fs.isfile(fs_join(fs_target, "subdir.txt")) + + fs.rm([fs_join(target, "subfile.txt")]) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + assert not fs.exists(target) + + # Test with glob + fs.put(local_join(source, "subdir*"), fs_target, recursive=True) + + assert fs.isdir(fs_join(fs_target, "subdir")) + assert fs.isfile(fs_join(fs_target, "subdir", "subfile.txt")) + assert fs.isfile(fs_join(fs_target, "subdir.txt")) + + def test_copy_with_source_and_destination_as_list( + self, fs, fs_target, fs_join, local_join, local_10_files_with_hashed_names + ): + # Create the test dir + source = local_10_files_with_hashed_names + target = fs_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(local_join(source, f"{hashed_i}.txt")) + destination_files.append(fs_join(target, f"{hashed_i}.txt")) + + # Copy and assert order was kept + fs.put(lpath=source_files, rpath=destination_files) + + for i in range(10): + file_content = fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/transaction.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/transaction.py new file mode 100644 index 0000000000000000000000000000000000000000..77293f63ecc5f611e19d849ef236d53e9c258efc --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/transaction.py @@ -0,0 +1,90 @@ +from collections import deque + + +class Transaction: + """Filesystem transaction write context + + Gathers files for deferred commit or discard, so that several write + operations can be finalized semi-atomically. 
This works by having this + instance as the ``.transaction`` attribute of the given filesystem + """ + + def __init__(self, fs, **kwargs): + """ + Parameters + ---------- + fs: FileSystem instance + """ + self.fs = fs + self.files = deque() + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """End transaction and commit, if exit is not due to exception""" + # only commit if there was no exception + self.complete(commit=exc_type is None) + if self.fs: + self.fs._intrans = False + self.fs._transaction = None + self.fs = None + + def start(self): + """Start a transaction on this FileSystem""" + self.files = deque() # clean up after previous failed completions + self.fs._intrans = True + + def complete(self, commit=True): + """Finish transaction: commit or discard all deferred files""" + while self.files: + f = self.files.popleft() + if commit: + f.commit() + else: + f.discard() + self.fs._intrans = False + self.fs._transaction = None + self.fs = None + + +class FileActor: + def __init__(self): + self.files = [] + + def commit(self): + for f in self.files: + f.commit() + self.files.clear() + + def discard(self): + for f in self.files: + f.discard() + self.files.clear() + + def append(self, f): + self.files.append(f) + + +class DaskTransaction(Transaction): + def __init__(self, fs): + """ + Parameters + ---------- + fs: FileSystem instance + """ + import distributed + + super().__init__(fs) + client = distributed.default_client() + self.files = client.submit(FileActor, actor=True).result() + + def complete(self, commit=True): + """Finish transaction: commit or discard all deferred files""" + if commit: + self.files.commit().result() + else: + self.files.discard().result() + self.fs._intrans = False + self.fs = None diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/utils.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6441c5b1d3a766e7911161e54c6d7fba8eb3032e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/fsspec/utils.py @@ -0,0 +1,737 @@ +from __future__ import annotations + +import contextlib +import logging +import math +import os +import re +import sys +import tempfile +from collections.abc import Iterable, Iterator, Sequence +from functools import partial +from hashlib import md5 +from importlib.metadata import version +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + TypeVar, +) +from urllib.parse import urlsplit + +if TYPE_CHECKING: + import pathlib + + from typing_extensions import TypeGuard + + from fsspec.spec import AbstractFileSystem + + +DEFAULT_BLOCK_SIZE = 5 * 2**20 + +T = TypeVar("T") + + +def infer_storage_options( + urlpath: str, inherit_storage_options: dict[str, Any] | None = None +) -> dict[str, Any]: + """Infer storage options from URL path and merge them with existing storage + options. + + Parameters + ---------- + urlpath: str or unicode + Either local absolute file path or URL (hdfs://namenode:8020/file.csv) + inherit_storage_options: dict (optional) + Its contents will get merged with the inferred information from the + given path + + Returns + ------- + Storage options dict. + + Examples + -------- + >>> infer_storage_options('/mnt/datasets/test.csv') # doctest: +SKIP + {"protocol": "file", "path": "/mnt/datasets/test.csv"} + >>> infer_storage_options( + ... 
'hdfs://username:pwd@node:123/mnt/datasets/test.csv?q=1', + ... inherit_storage_options={'extra': 'value'}, + ... ) # doctest: +SKIP + {"protocol": "hdfs", "username": "username", "password": "pwd", + "host": "node", "port": 123, "path": "/mnt/datasets/test.csv", + "url_query": "q=1", "extra": "value"} + """ + # Handle Windows paths including disk name in this special case + if ( + re.match(r"^[a-zA-Z]:[\\/]", urlpath) + or re.match(r"^[a-zA-Z0-9]+://", urlpath) is None + ): + return {"protocol": "file", "path": urlpath} + + parsed_path = urlsplit(urlpath) + protocol = parsed_path.scheme or "file" + if parsed_path.fragment: + path = "#".join([parsed_path.path, parsed_path.fragment]) + else: + path = parsed_path.path + if protocol == "file": + # Special case parsing file protocol URL on Windows according to: + # https://msdn.microsoft.com/en-us/library/jj710207.aspx + windows_path = re.match(r"^/([a-zA-Z])[:|]([\\/].*)$", path) + if windows_path: + drive, path = windows_path.groups() + path = f"{drive}:{path}" + + if protocol in ["http", "https"]: + # for HTTP, we don't want to parse, as requests will anyway + return {"protocol": protocol, "path": urlpath} + + options: dict[str, Any] = {"protocol": protocol, "path": path} + + if parsed_path.netloc: + # Parse `hostname` from netloc manually because `parsed_path.hostname` + # lowercases the hostname which is not always desirable (e.g. in S3): + # https://github.com/dask/dask/issues/1417 + options["host"] = parsed_path.netloc.rsplit("@", 1)[-1].rsplit(":", 1)[0] + + if protocol in ("s3", "s3a", "gcs", "gs"): + options["path"] = options["host"] + options["path"] + else: + options["host"] = options["host"] + if parsed_path.port: + options["port"] = parsed_path.port + if parsed_path.username: + options["username"] = parsed_path.username + if parsed_path.password: + options["password"] = parsed_path.password + + if parsed_path.query: + options["url_query"] = parsed_path.query + if parsed_path.fragment: + options["url_fragment"] = parsed_path.fragment + + if inherit_storage_options: + update_storage_options(options, inherit_storage_options) + + return options + + +def update_storage_options( + options: dict[str, Any], inherited: dict[str, Any] | None = None +) -> None: + if not inherited: + inherited = {} + collisions = set(options) & set(inherited) + if collisions: + for collision in collisions: + if options.get(collision) != inherited.get(collision): + raise KeyError( + f"Collision between inferred and specified storage " + f"option:\n{collision}" + ) + options.update(inherited) + + +# Compression extensions registered via fsspec.compression.register_compression +compressions: dict[str, str] = {} + + +def infer_compression(filename: str) -> str | None: + """Infer compression, if available, from filename. + + Infer a named compression type, if registered and available, from filename + extension. This includes builtin (gz, bz2, zip) compressions, as well as + optional compressions. See fsspec.compression.register_compression. 
+ """ + extension = os.path.splitext(filename)[-1].strip(".").lower() + if extension in compressions: + return compressions[extension] + return None + + +def build_name_function(max_int: float) -> Callable[[int], str]: + """Returns a function that receives a single integer + and returns it as a string padded by enough zero characters + to align with maximum possible integer + + >>> name_f = build_name_function(57) + + >>> name_f(7) + '07' + >>> name_f(31) + '31' + >>> build_name_function(1000)(42) + '0042' + >>> build_name_function(999)(42) + '042' + >>> build_name_function(0)(0) + '0' + """ + # handle corner cases max_int is 0 or exact power of 10 + max_int += 1e-8 + + pad_length = int(math.ceil(math.log10(max_int))) + + def name_function(i: int) -> str: + return str(i).zfill(pad_length) + + return name_function + + +def seek_delimiter(file: IO[bytes], delimiter: bytes, blocksize: int) -> bool: + r"""Seek current file to file start, file end, or byte after delimiter seq. + + Seeks file to next chunk delimiter, where chunks are defined on file start, + a delimiting sequence, and file end. Use file.tell() to see location afterwards. + Note that file start is a valid split, so must be at offset > 0 to seek for + delimiter. + + Parameters + ---------- + file: a file + delimiter: bytes + a delimiter like ``b'\n'`` or message sentinel, matching file .read() type + blocksize: int + Number of bytes to read from the file at once. + + + Returns + ------- + Returns True if a delimiter was found, False if at file start or end. + + """ + + if file.tell() == 0: + # beginning-of-file, return without seek + return False + + # Interface is for binary IO, with delimiter as bytes, but initialize last + # with result of file.read to preserve compatibility with text IO. + last: bytes | None = None + while True: + current = file.read(blocksize) + if not current: + # end-of-file without delimiter + return False + full = last + current if last else current + try: + if delimiter in full: + i = full.index(delimiter) + file.seek(file.tell() - (len(full) - i) + len(delimiter)) + return True + elif len(current) < blocksize: + # end-of-file without delimiter + return False + except (OSError, ValueError): + pass + last = full[-len(delimiter) :] + + +def read_block( + f: IO[bytes], + offset: int, + length: int | None, + delimiter: bytes | None = None, + split_before: bool = False, +) -> bytes: + """Read a block of bytes from a file + + Parameters + ---------- + f: File + Open file + offset: int + Byte offset to start read + length: int + Number of bytes to read, read through end of file if None + delimiter: bytes (optional) + Ensure reading starts and stops at delimiter bytestring + split_before: bool (optional) + Start/stop read *before* delimiter bytestring. + + + If using the ``delimiter=`` keyword argument we ensure that the read + starts and stops at delimiter boundaries that follow the locations + ``offset`` and ``offset + length``. If ``offset`` is zero then we + start at zero, regardless of delimiter. The bytestring returned WILL + include the terminating delimiter string. 
+ + Examples + -------- + + >>> from io import BytesIO # doctest: +SKIP + >>> f = BytesIO(b'Alice, 100\\nBob, 200\\nCharlie, 300') # doctest: +SKIP + >>> read_block(f, 0, 13) # doctest: +SKIP + b'Alice, 100\\nBo' + + >>> read_block(f, 0, 13, delimiter=b'\\n') # doctest: +SKIP + b'Alice, 100\\nBob, 200\\n' + + >>> read_block(f, 10, 10, delimiter=b'\\n') # doctest: +SKIP + b'Bob, 200\\nCharlie, 300' + """ + if delimiter: + f.seek(offset) + found_start_delim = seek_delimiter(f, delimiter, 2**16) + if length is None: + return f.read() + start = f.tell() + length -= start - offset + + f.seek(start + length) + found_end_delim = seek_delimiter(f, delimiter, 2**16) + end = f.tell() + + # Adjust split location to before delimiter if seek found the + # delimiter sequence, not start or end of file. + if found_start_delim and split_before: + start -= len(delimiter) + + if found_end_delim and split_before: + end -= len(delimiter) + + offset = start + length = end - start + + f.seek(offset) + + # TODO: allow length to be None and read to the end of the file? + assert length is not None + b = f.read(length) + return b + + +def tokenize(*args: Any, **kwargs: Any) -> str: + """Deterministic token + + (modified from dask.base) + + >>> tokenize([1, 2, '3']) + '9d71491b50023b06fc76928e6eddb952' + + >>> tokenize('Hello') == tokenize('Hello') + True + """ + if kwargs: + args += (kwargs,) + try: + h = md5(str(args).encode()) + except ValueError: + # FIPS systems: https://github.com/fsspec/filesystem_spec/issues/380 + h = md5(str(args).encode(), usedforsecurity=False) + return h.hexdigest() + + +def stringify_path(filepath: str | os.PathLike[str] | pathlib.Path) -> str: + """Attempt to convert a path-like object to a string. + + Parameters + ---------- + filepath: object to be converted + + Returns + ------- + filepath_str: maybe a string version of the object + + Notes + ----- + Objects supporting the fspath protocol are coerced according to its + __fspath__ method. + + For backwards compatibility with older Python version, pathlib.Path + objects are specially coerced. + + Any other object is passed through unchanged, which includes bytes, + strings, buffers, or anything else that's not even path-like. + """ + if isinstance(filepath, str): + return filepath + elif hasattr(filepath, "__fspath__"): + return filepath.__fspath__() + elif hasattr(filepath, "path"): + return filepath.path + else: + return filepath # type: ignore[return-value] + + +def make_instance( + cls: Callable[..., T], args: Sequence[Any], kwargs: dict[str, Any] +) -> T: + inst = cls(*args, **kwargs) + inst._determine_worker() # type: ignore[attr-defined] + return inst + + +def common_prefix(paths: Iterable[str]) -> str: + """For a list of paths, find the shortest prefix common to all""" + parts = [p.split("/") for p in paths] + lmax = min(len(p) for p in parts) + end = 0 + for i in range(lmax): + end = all(p[i] == parts[0][i] for p in parts) + if not end: + break + i += end + return "/".join(parts[0][:i]) + + +def other_paths( + paths: list[str], + path2: str | list[str], + exists: bool = False, + flatten: bool = False, +) -> list[str]: + """In bulk file operations, construct a new file tree from a list of files + + Parameters + ---------- + paths: list of str + The input file tree + path2: str or list of str + Root to construct the new list in. If this is already a list of str, we just + assert it has the right number of elements. 
exists: bool (optional) + For a str destination, whether it already exists (and is a dir); files should + end up inside it. + flatten: bool (optional) + Whether to flatten the input directory tree structure so that the output files + are in the same directory. + + Returns + ------- + list of str + """ + + if isinstance(path2, str): + path2 = path2.rstrip("/") + + if flatten: + path2 = ["/".join((path2, p.split("/")[-1])) for p in paths] + else: + cp = common_prefix(paths) + if exists: + cp = cp.rsplit("/", 1)[0] + if not cp and all(not s.startswith("/") for s in paths): + path2 = ["/".join([path2, p]) for p in paths] + else: + path2 = [p.replace(cp, path2, 1) for p in paths] + else: + assert len(paths) == len(path2) + return path2 + + +def is_exception(obj: Any) -> bool: + return isinstance(obj, BaseException) + + +def isfilelike(f: Any) -> TypeGuard[IO[bytes]]: + return all(hasattr(f, attr) for attr in ["read", "close", "tell"]) + + +def get_protocol(url: str) -> str: + url = stringify_path(url) + parts = re.split(r"(\:\:|\://)", url, maxsplit=1) + if len(parts) > 1: + return parts[0] + return "file" + + +def can_be_local(path: str) -> bool: + """Can the given URL be used with open_local?""" + from fsspec import get_filesystem_class + + try: + return getattr(get_filesystem_class(get_protocol(path)), "local_file", False) + except (ValueError, ImportError): + # not in registry or import failed + return False + + +def get_package_version_without_import(name: str) -> str | None: + """For a given package name, try to find the version without importing it + + Import and package.__version__ is still the backup here, so an import + *might* happen. + + Returns either the version string, or None if the package + or the version was not readily found. + """ + if name in sys.modules: + mod = sys.modules[name] + if hasattr(mod, "__version__"): + return mod.__version__ + try: + return version(name) + except: # noqa: E722 + pass + try: + import importlib + + mod = importlib.import_module(name) + return mod.__version__ + except (ImportError, AttributeError): + return None + + +def setup_logging( + logger: logging.Logger | None = None, + logger_name: str | None = None, + level: str = "DEBUG", + clear: bool = True, +) -> logging.Logger: + if logger is None and logger_name is None: + raise ValueError("Provide either logger object or logger name") + logger = logger or logging.getLogger(logger_name) + handle = logging.StreamHandler() + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(funcName)s -- %(message)s" + ) + handle.setFormatter(formatter) + if clear: + logger.handlers.clear() + logger.addHandler(handle) + logger.setLevel(level) + return logger + + +def _unstrip_protocol(name: str, fs: AbstractFileSystem) -> str: + return fs.unstrip_protocol(name) + + +def mirror_from( + origin_name: str, methods: Iterable[str] +) -> Callable[[type[T]], type[T]]: + """Mirror attributes and methods from the given + origin_name attribute of the instance to the + decorated class""" + + def origin_getter(method: str, self: Any) -> Any: + origin = getattr(self, origin_name) + return getattr(origin, method) + + def wrapper(cls: type[T]) -> type[T]: + for method in methods: + wrapped_method = partial(origin_getter, method) + setattr(cls, method, property(wrapped_method)) + return cls + + return wrapper + + +@contextlib.contextmanager +def nullcontext(obj: T) -> Iterator[T]: + yield obj + + +def merge_offset_ranges( + paths: list[str], + starts: list[int] | int, + ends: list[int] | int, + max_gap: int = 0, +
max_block: int | None = None, + sort: bool = True, +) -> tuple[list[str], list[int], list[int]]: + """Merge adjacent byte-offset ranges when the inter-range + gap is <= `max_gap`, and when the merged byte range does not + exceed `max_block` (if specified). By default, this function + will re-order the input paths and byte ranges to ensure sorted + order. If the user can guarantee that the inputs are already + sorted, passing `sort=False` will skip the re-ordering. + """ + # Check input + if not isinstance(paths, list): + raise TypeError + if not isinstance(starts, list): + starts = [starts] * len(paths) + if not isinstance(ends, list): + ends = [ends] * len(paths) + if len(starts) != len(paths) or len(ends) != len(paths): + raise ValueError + + # Early Return + if len(starts) <= 1: + return paths, starts, ends + + starts = [s or 0 for s in starts] + # Sort by paths and then ranges if `sort=True` + if sort: + paths, starts, ends = ( + list(v) + for v in zip( + *sorted( + zip(paths, starts, ends), + ) + ) + ) + + if paths: + # Loop through the coupled `paths`, `starts`, and + # `ends`, and merge adjacent blocks when appropriate + new_paths = paths[:1] + new_starts = starts[:1] + new_ends = ends[:1] + for i in range(1, len(paths)): + if paths[i] == paths[i - 1] and new_ends[-1] is None: + continue + elif ( + paths[i] != paths[i - 1] + or ((starts[i] - new_ends[-1]) > max_gap) + or (max_block is not None and (ends[i] - new_starts[-1]) > max_block) + ): + # Cannot merge with previous block. + # Add new `paths`, `starts`, and `ends` elements + new_paths.append(paths[i]) + new_starts.append(starts[i]) + new_ends.append(ends[i]) + else: + # Merge with previous block by updating the + # last element of `ends` + new_ends[-1] = ends[i] + return new_paths, new_starts, new_ends + + # `paths` is empty. Just return input lists + return paths, starts, ends + + +def file_size(filelike: IO[bytes]) -> int: + """Find length of any open read-mode file-like""" + pos = filelike.tell() + try: + return filelike.seek(0, 2) + finally: + filelike.seek(pos) + + +@contextlib.contextmanager +def atomic_write(path: str, mode: str = "wb"): + """ + A context manager that opens a temporary file next to `path` and, on exit, + replaces `path` with the temporary file, thereby updating `path` + atomically. + """ + fd, fn = tempfile.mkstemp( + dir=os.path.dirname(path), prefix=os.path.basename(path) + "-" + ) + try: + with open(fd, mode) as fp: + yield fp + except BaseException: + with contextlib.suppress(FileNotFoundError): + os.unlink(fn) + raise + else: + os.replace(fn, path) + + +def _translate(pat, STAR, QUESTION_MARK): + # Copied from: https://github.com/python/cpython/pull/106703. + res: list[str] = [] + add = res.append + i, n = 0, len(pat) + while i < n: + c = pat[i] + i = i + 1 + if c == "*": + # compress consecutive `*` into one + if (not res) or res[-1] is not STAR: + add(STAR) + elif c == "?": + add(QUESTION_MARK) + elif c == "[": + j = i + if j < n and pat[j] == "!": + j = j + 1 + if j < n and pat[j] == "]": + j = j + 1 + while j < n and pat[j] != "]": + j = j + 1 + if j >= n: + add("\\[") + else: + stuff = pat[i:j] + if "-" not in stuff: + stuff = stuff.replace("\\", r"\\") + else: + chunks = [] + k = i + 2 if pat[i] == "!" else i + 1 + while True: + k = pat.find("-", k, j) + if k < 0: + break + chunks.append(pat[i:k]) + i = k + 1 + k = k + 3 + chunk = pat[i:j] + if chunk: + chunks.append(chunk) + else: + chunks[-1] += "-" + # Remove empty ranges -- invalid in RE. 
+ for k in range(len(chunks) - 1, 0, -1): + if chunks[k - 1][-1] > chunks[k][0]: + chunks[k - 1] = chunks[k - 1][:-1] + chunks[k][1:] + del chunks[k] + # Escape backslashes and hyphens for set difference (--). + # Hyphens that create ranges shouldn't be escaped. + stuff = "-".join( + s.replace("\\", r"\\").replace("-", r"\-") for s in chunks + ) + # Escape set operations (&&, ~~ and ||). + stuff = re.sub(r"([&~|])", r"\\\1", stuff) + i = j + 1 + if not stuff: + # Empty range: never match. + add("(?!)") + elif stuff == "!": + # Negated empty range: match any character. + add(".") + else: + if stuff[0] == "!": + stuff = "^" + stuff[1:] + elif stuff[0] in ("^", "["): + stuff = "\\" + stuff + add(f"[{stuff}]") + else: + add(re.escape(c)) + assert i == n + return res + + +def glob_translate(pat): + # Copied from: https://github.com/python/cpython/pull/106703. + # The keyword parameters' values are fixed to: + # recursive=True, include_hidden=True, seps=None + """Translate a pathname with shell wildcards to a regular expression.""" + if os.path.altsep: + seps = os.path.sep + os.path.altsep + else: + seps = os.path.sep + escaped_seps = "".join(map(re.escape, seps)) + any_sep = f"[{escaped_seps}]" if len(seps) > 1 else escaped_seps + not_sep = f"[^{escaped_seps}]" + one_last_segment = f"{not_sep}+" + one_segment = f"{one_last_segment}{any_sep}" + any_segments = f"(?:.+{any_sep})?" + any_last_segments = ".*" + results = [] + parts = re.split(any_sep, pat) + last_part_idx = len(parts) - 1 + for idx, part in enumerate(parts): + if part == "*": + results.append(one_segment if idx < last_part_idx else one_last_segment) + continue + if part == "**": + results.append(any_segments if idx < last_part_idx else any_last_segments) + continue + elif "**" in part: + raise ValueError( + "Invalid pattern: '**' can only be an entire path component" + ) + if part: + results.extend(_translate(part, f"{not_sep}*", not_sep)) + if idx < last_part_idx: + results.append(any_sep) + res = "".join(results) + return rf"(?s:{res})\Z" diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/functorch/_C.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/functorch/_C.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..b52ac86519bea29a92e0ed3fafb2b66ab1019744 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/functorch/_C.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d651b5136f3f5590a41091eda5964279e1eeae237a2324c0e3548b4c088b43b +size 316336 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/INSTALLER b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/METADATA b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/METADATA new file mode 100644 index 
0000000000000000000000000000000000000000..5818532f37e4b71875355f59bc62e530b3e88def --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/METADATA @@ -0,0 +1,116 @@ +Metadata-Version: 2.4 +Name: greenlet +Version: 3.2.3 +Summary: Lightweight in-process concurrent programming +Home-page: https://greenlet.readthedocs.io/ +Author: Alexey Borzenkov +Author-email: snaury@gmail.com +Maintainer: Jason Madden +Maintainer-email: jason@seecoresoftware.com +License: MIT AND Python-2.0 +Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues +Project-URL: Source Code, https://github.com/python-greenlet/greenlet/ +Project-URL: Documentation, https://greenlet.readthedocs.io/ +Project-URL: Changes, https://greenlet.readthedocs.io/en/latest/changes.html +Keywords: greenlet coroutine concurrency threads cooperative +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Programming Language :: C +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.PSF +Provides-Extra: docs +Requires-Dist: Sphinx; extra == "docs" +Requires-Dist: furo; extra == "docs" +Provides-Extra: test +Requires-Dist: objgraph; extra == "test" +Requires-Dist: psutil; extra == "test" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: maintainer +Dynamic: maintainer-email +Dynamic: platform +Dynamic: project-url +Dynamic: provides-extra +Dynamic: requires-python +Dynamic: summary + +.. This file is included into docs/history.rst + + +Greenlets are lightweight coroutines for in-process concurrent +programming. + +The "greenlet" package is a spin-off of `Stackless`_, a version of +CPython that supports micro-threads called "tasklets". Tasklets run +pseudo-concurrently (typically in a single or a few OS-level threads) +and are synchronized with data exchanges on "channels". + +A "greenlet", on the other hand, is a still more primitive notion of +micro-thread with no implicit scheduling; coroutines, in other words. +This is useful when you want to control exactly when your code runs. +You can build custom scheduled micro-threads on top of greenlet; +however, it seems that greenlets are useful on their own as a way to +make advanced control flow structures. For example, we can recreate +generators; the difference with Python's own generators is that our +generators can call nested functions and the nested functions can +yield values too. (Additionally, you don't need a "yield" keyword. See +the example in `test_generator.py +`_). + +Greenlets are provided as a C extension module for the regular unmodified +interpreter. + +.. _`Stackless`: http://www.stackless.com + + +Who is using Greenlet? 
+====================== + +There are several libraries that use Greenlet as a more flexible +alternative to Python's built in coroutine support: + + - `Concurrence`_ + - `Eventlet`_ + - `Gevent`_ + +.. _Concurrence: http://opensource.hyves.org/concurrence/ +.. _Eventlet: http://eventlet.net/ +.. _Gevent: http://www.gevent.org/ + +Getting Greenlet +================ + +The easiest way to get Greenlet is to install it with pip:: + + pip install greenlet + + +Source code archives and binary distributions are available on the +python package index at https://pypi.org/project/greenlet + +The source code repository is hosted on github: +https://github.com/python-greenlet/greenlet + +Documentation is available on readthedocs.org: +https://greenlet.readthedocs.io diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/RECORD b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..edef61f46ca5c0c371aef5d4bfb22afcc6c45eda --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/RECORD @@ -0,0 +1,97 @@ +../../../include/site/python3.10/greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet-3.2.3.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +greenlet-3.2.3.dist-info/METADATA,sha256=4LPtpV-ZBD7xpjTg5BKO5apQWLITbBym_tFpzsdSRak,4077 +greenlet-3.2.3.dist-info/RECORD,, +greenlet-3.2.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +greenlet-3.2.3.dist-info/WHEEL,sha256=HeTPKmtWRHAcGMCPCdnU4FtSZuvZBNNr3hB4MNGY2JY,152 +greenlet-3.2.3.dist-info/licenses/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434 +greenlet-3.2.3.dist-info/licenses/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424 +greenlet-3.2.3.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9 +greenlet/CObjects.cpp,sha256=OPej1bWBgc4sRrTRQ2aFFML9pzDYKlKhlJSjsI0X_eU,3508 +greenlet/PyGreenlet.cpp,sha256=ogWsQ5VhSdItWRLLpWOgSuqYuM3QwQ4cVCxOQIgHx6E,23441 +greenlet/PyGreenlet.hpp,sha256=2ZQlOxYNoy7QwD7mppFoOXe_At56NIsJ0eNsE_hoSsw,1463 +greenlet/PyGreenletUnswitchable.cpp,sha256=PQE0fSZa_IOyUM44IESHkJoD2KtGW3dkhkmZSYY3WHs,4375 +greenlet/PyModule.cpp,sha256=J2TH06dGcNEarioS6NbWXkdME8hJY05XVbdqLrfO5w4,8587 +greenlet/TBrokenGreenlet.cpp,sha256=smN26uC7ahAbNYiS10rtWPjCeTG4jevM8siA2sjJiXg,1021 +greenlet/TExceptionState.cpp,sha256=U7Ctw9fBdNraS0d174MoQW7bN-ae209Ta0JuiKpcpVI,1359 +greenlet/TGreenlet.cpp,sha256=HGYGKpmKYqQ842tASW-QaaV8wua4a5XV_quYKPDsV_Y,25731 +greenlet/TGreenlet.hpp,sha256=zMakwTJ30onkLiAq5h1WR7ewEkiJXKM0N9AR6BnXQMI,28141 +greenlet/TGreenletGlobals.cpp,sha256=YyEmDjKf1g32bsL-unIUScFLnnA1fzLWf2gOMd-D0Zw,3264 +greenlet/TMainGreenlet.cpp,sha256=fvgb8HHB-FVTPEKjR1s_ifCZSpp5D5YQByik0CnIABg,3276 +greenlet/TPythonState.cpp,sha256=vBMJT9qScTSIqhnOTVJqsGug3WbKv9dDt0cOqyhUk8w,15779 +greenlet/TStackState.cpp,sha256=V444I8Jj9DhQz-9leVW_9dtiSRjaE1NMlgDG02Xxq-Y,7381 +greenlet/TThreadState.hpp,sha256=2Jgg7DtGggMYR_x3CLAvAFf1mIdIDtQvSSItcdmX4ZQ,19131 +greenlet/TThreadStateCreator.hpp,sha256=uYTexDWooXSSgUc5uh-Mhm5BQi3-kR6CqpizvNynBFQ,2610 +greenlet/TThreadStateDestroy.cpp,sha256=36yBCAMq3beXTZd-XnFA7DwaHVSOx2vc28-nf0spysU,8169 +greenlet/TUserGreenlet.cpp,sha256=uemg0lwKXtYB0yzmvyYdIIAsKnNkifXM1OJ2OlrFP1A,23553 
+greenlet/__init__.py,sha256=1zAfqzVVnBnQhxOL6UZ0t9jrNJUNJRgPEFvzW1qss9k,1723 +greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so,sha256=JcsWCGVCb-4g62P9Ae61ccRiENojILPQxT7Y96h01v0,1360968 +greenlet/greenlet.cpp,sha256=WdItb1yWL9WNsTqJNf0Iw8ZwDHD49pkDP0rIRGBg2pw,10996 +greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet/greenlet_allocator.hpp,sha256=kxyWW4Qdwlrc7ufgdb5vd6Y7jhauQ699Kod0mqiO1iM,1582 +greenlet/greenlet_compiler_compat.hpp,sha256=nRxpLN9iNbnLVyFDeVmOwyeeNm6scQrOed1l7JQYMCM,4346 +greenlet/greenlet_cpython_compat.hpp,sha256=XrsoFv8nKavrdxly5_-q9lqGeE8d3wKt1YtldsAHAT8,4068 +greenlet/greenlet_exceptions.hpp,sha256=06Bx81DtVaJTa6RtiMcV141b-XHv4ppEgVItkblcLWY,4503 +greenlet/greenlet_internal.hpp,sha256=Ajc-_09W4xWzm9XfyXHAeQAFUgKGKsnJwYsTCoNy3ns,2709 +greenlet/greenlet_msvc_compat.hpp,sha256=0MyaiyoCE_A6UROXZlMQRxRS17gfyh0d7NUppU3EVFc,2978 +greenlet/greenlet_refs.hpp,sha256=OnbA91yZf3QHH6-eJccvoNDAaN-pQBMMrclFU1Ot3J4,34436 +greenlet/greenlet_slp_switch.hpp,sha256=kM1QHA2iV-gH4cFyN6lfIagHQxvJZjWOVJdIxRE3TlQ,3198 +greenlet/greenlet_thread_support.hpp,sha256=XUJ6ljWjf9OYyuOILiz8e_yHvT3fbaUiHdhiPNQUV4s,867 +greenlet/platform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143 +greenlet/platform/switch_aarch64_gcc.h,sha256=GKC0yWNXnbK2X--X6aguRCMj2Tg7hDU1Zkl3RljDvC8,4307 +greenlet/platform/switch_alpha_unix.h,sha256=Z-SvF8JQV3oxWT8JRbL9RFu4gRFxPdJ7cviM8YayMmw,671 +greenlet/platform/switch_amd64_unix.h,sha256=EcSFCBlodEBhqhKjcJqY_5Dn_jn7pKpkJlOvp7gFXLI,2748 +greenlet/platform/switch_arm32_gcc.h,sha256=Z3KkHszdgq6uU4YN3BxvKMG2AdDnovwCCNrqGWZ1Lyo,2479 +greenlet/platform/switch_arm32_ios.h,sha256=mm5_R9aXB92hyxzFRwB71M60H6AlvHjrpTrc72Pz3l8,1892 +greenlet/platform/switch_arm64_masm.asm,sha256=4kpTtfy7rfcr8j1CpJLAK21EtZpGDAJXWRU68HEy5A8,1245 +greenlet/platform/switch_arm64_masm.obj,sha256=DmLnIB_icoEHAz1naue_pJPTZgR9ElM7-Nmztr-o9_U,746 +greenlet/platform/switch_arm64_msvc.h,sha256=RqK5MHLmXI3Q-FQ7tm32KWnbDNZKnkJdq8CR89cz640,398 +greenlet/platform/switch_csky_gcc.h,sha256=kDikyiPpewP71KoBZQO_MukDTXTXBiC7x-hF0_2DL0w,1331 +greenlet/platform/switch_loongarch64_linux.h,sha256=7M-Dhc4Q8tRbJCJhalDLwU6S9Mx8MjmN1RbTDgIvQTM,779 +greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928 +greenlet/platform/switch_mips_unix.h,sha256=E0tYsqc5anDY1BhenU1l8DW-nVHC_BElzLgJw3TGtPk,1426 +greenlet/platform/switch_ppc64_aix.h,sha256=_BL0iyRr3ZA5iPlr3uk9SJ5sNRWGYLrXcZ5z-CE9anE,3860 +greenlet/platform/switch_ppc64_linux.h,sha256=0rriT5XyxPb0GqsSSn_bP9iQsnjsPbBmu0yqo5goSyQ,3815 +greenlet/platform/switch_ppc_aix.h,sha256=pHA4slEjUFP3J3SYm1TAlNPhgb2G_PAtax5cO8BEe1A,2941 +greenlet/platform/switch_ppc_linux.h,sha256=YwrlKUzxlXuiKMQqr6MFAV1bPzWnmvk6X1AqJZEpOWU,2759 +greenlet/platform/switch_ppc_macosx.h,sha256=Z6KN_ud0n6nC3ltJrNz2qtvER6vnRAVRNH9mdIDpMxY,2624 +greenlet/platform/switch_ppc_unix.h,sha256=-ZG7MSSPEA5N4qO9PQChtyEJ-Fm6qInhyZm_ZBHTtMg,2652 +greenlet/platform/switch_riscv_unix.h,sha256=606V6ACDf79Fz_WGItnkgbjIJ0pGg_sHmPyDxQYKK58,949 +greenlet/platform/switch_s390_unix.h,sha256=RRlGu957ybmq95qNNY4Qw1mcaoT3eBnW5KbVwu48KX8,2763 +greenlet/platform/switch_sh_gcc.h,sha256=mcRJBTu-2UBf4kZtX601qofwuDuy-Y-hnxJtrcaB7do,901 +greenlet/platform/switch_sparc_sun_gcc.h,sha256=xZish9GsMHBienUbUMsX1-ZZ-as7hs36sVhYIE3ew8Y,2797 +greenlet/platform/switch_x32_unix.h,sha256=nM98PKtzTWc1lcM7TRMUZJzskVdR1C69U1UqZRWX0GE,1509 
+greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 +greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 +greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 +greenlet/platform/switch_x86_msvc.h,sha256=TtGOwinbFfnn6clxMNkCz8i6OmgB6kVRrShoF5iT9to,12838 +greenlet/platform/switch_x86_unix.h,sha256=VplW9H0FF0cZHw1DhJdIUs5q6YLS4cwb2nYwjF83R1s,3059 +greenlet/slp_platformselect.h,sha256=hTb3GFdcPUYJTuu1MY93js7MZEax1_e5E-gflpi0RzI,3959 +greenlet/tests/__init__.py,sha256=sqxm7-dZuGBwmNI0n6xrcQJGoHHjoXUGyUTnvHidcYM,9361 +greenlet/tests/_test_extension.c,sha256=vkeGA-6oeJcGILsD7oIrT1qZop2GaTOHXiNT7mcSl-0,5773 +greenlet/tests/_test_extension.cpython-310-x86_64-linux-gnu.so,sha256=p118NJ4hObhSNcvKLduspwQExvXHPDAbWVVMU6o3dqs,17256 +greenlet/tests/_test_extension_cpp.cpp,sha256=e0kVnaB8CCaEhE9yHtNyfqTjevsPDKKx-zgxk7PPK48,6565 +greenlet/tests/_test_extension_cpp.cpython-310-x86_64-linux-gnu.so,sha256=jjtWz-wUMS5imeCnjV_p4tKeRexjRgSz8XYkfztHmw0,57880 +greenlet/tests/fail_clearing_run_switches.py,sha256=o433oA_nUCtOPaMEGc8VEhZIKa71imVHXFw7TsXaP8M,1263 +greenlet/tests/fail_cpp_exception.py,sha256=o_ZbipWikok8Bjc-vjiQvcb5FHh2nVW-McGKMLcMzh0,985 +greenlet/tests/fail_initialstub_already_started.py,sha256=txENn5IyzGx2p-XR1XB7qXmC8JX_4mKDEA8kYBXUQKc,1961 +greenlet/tests/fail_slp_switch.py,sha256=rJBZcZfTWR3e2ERQtPAud6YKShiDsP84PmwOJbp4ey0,524 +greenlet/tests/fail_switch_three_greenlets.py,sha256=zSitV7rkNnaoHYVzAGGLnxz-yPtohXJJzaE8ehFDQ0M,956 +greenlet/tests/fail_switch_three_greenlets2.py,sha256=FPJensn2EJxoropl03JSTVP3kgP33k04h6aDWWozrOk,1285 +greenlet/tests/fail_switch_two_greenlets.py,sha256=1CaI8s3504VbbF1vj1uBYuy-zxBHVzHPIAd1LIc8ONg,817 +greenlet/tests/leakcheck.py,sha256=inbfM7_oVzd8jIKGxCgo4JqpFZaDAnWPkSULJ8vIE1s,11964 +greenlet/tests/test_contextvars.py,sha256=xutO-qZgKTwKsA9lAqTjIcTBEiQV4RpNKM-vO2_YCVU,10541 +greenlet/tests/test_cpp.py,sha256=hpxhFAdKJTpAVZP8CBGs1ZcrKdscI9BaDZk4btkI5d4,2736 +greenlet/tests/test_extension_interface.py,sha256=eJ3cwLacdK2WbsrC-4DgeyHdwLRcG4zx7rrkRtqSzC4,3829 +greenlet/tests/test_gc.py,sha256=PCOaRpIyjNnNlDogGL3FZU_lrdXuM-pv1rxeE5TP5mc,2923 +greenlet/tests/test_generator.py,sha256=tONXiTf98VGm347o1b-810daPiwdla5cbpFg6QI1R1g,1240 +greenlet/tests/test_generator_nested.py,sha256=7v4HOYrf1XZP39dk5IUMubdZ8yc3ynwZcqj9GUJyMSA,3718 +greenlet/tests/test_greenlet.py,sha256=rYWDvMx7ZpMlQju9KRxsBR61ela7HSJCg98JtR7RPOQ,46251 +greenlet/tests/test_greenlet_trash.py,sha256=n2dBlQfOoEO1ODatFi8QdhboH3fB86YtqzcYMYOXxbw,7947 +greenlet/tests/test_leaks.py,sha256=Qeso_qH9MCWJOkk2I3VcTh7UhaNvWxrzAmNBta-fUyY,17714 +greenlet/tests/test_stack_saved.py,sha256=eyzqNY2VCGuGlxhT_In6TvZ6Okb0AXFZVyBEnK1jDwA,446 +greenlet/tests/test_throw.py,sha256=u2TQ_WvvCd6N6JdXWIxVEcXkKu5fepDlz9dktYdmtng,3712 +greenlet/tests/test_tracing.py,sha256=VlwzMU0C1noospZhuUMyB7MHw200emIvGCN_6G2p2ZU,8250 +greenlet/tests/test_version.py,sha256=O9DpAITsOFgiRcjd4odQ7ejmwx_N9Q1zQENVcbtFHIc,1339 +greenlet/tests/test_weakref.py,sha256=F8M23btEF87bIbpptLNBORosbQqNZGiYeKMqYjWrsak,883 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/REQUESTED b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/WHEEL b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..6c1fa982ada600888782da66f19ebead8c0ad311 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_24_x86_64 +Tag: cp310-cp310-manylinux_2_28_x86_64 + diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/licenses/LICENSE b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b73a4a10c6fa93a3a6fa47deb3796c590a9dee10 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/licenses/LICENSE @@ -0,0 +1,30 @@ +The following files are derived from Stackless Python and are subject to the +same license as Stackless Python: + + src/greenlet/slp_platformselect.h + files in src/greenlet/platform/ directory + +See LICENSE.PSF and http://www.stackless.com/ for details. + +Unless otherwise noted, the files in greenlet have been released under the +following MIT license: + +Copyright (c) Armin Rigo, Christian Tismer and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/licenses/LICENSE.PSF b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/licenses/LICENSE.PSF new file mode 100644 index 0000000000000000000000000000000000000000..d3b509a2b4119c47ad86af9fc1b400ce2eb09cf1 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/licenses/LICENSE.PSF @@ -0,0 +1,47 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
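Stepping back to the greenlet METADATA above: its central claim is that greenlets involve no implicit scheduling, so control moves only where the code calls ``switch()``. A minimal sketch of that explicit hand-off, using only the public ``greenlet`` API::

    from greenlet import greenlet, getcurrent

    def child(value):
        print("child received:", value)
        main.switch("done")        # hand control back, with a value

    main = getcurrent()            # the implicit main greenlet
    g = greenlet(child)
    result = g.switch("hello")     # nothing in child runs before this call
    print("main resumed with:", result)   # -> main resumed with: done

Schedulers such as Gevent and Eventlet, listed above, are built on exactly this primitive.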
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/top_level.txt b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..46725be4fd5ff62893d8def5f9e356d4d096328a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet-3.2.3.dist-info/top_level.txt @@ -0,0 +1 @@ +greenlet diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..a113d1be0247cdb0a6694e7360f97c68292ae73a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/greenlet/_greenlet.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:25cb160865426fee20eb63fd01eeb571c46210da2320b3d0c53ed8f7a874d6fd +size 1360968 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..71f8f6036da29d9169a0fdffd8172ad912a9cc33 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/__init__.py @@ -0,0 +1,8 @@ +# mako/__init__.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +__version__ = "1.3.10" diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/_ast_util.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/_ast_util.py new file mode 100644 index 0000000000000000000000000000000000000000..385d7a0087db05bef6432cb8790ef0e7af366646 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/_ast_util.py @@ -0,0 +1,713 @@ +# mako/_ast_util.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + ast + ~~~ + + This is a stripped down version of Armin Ronacher's ast module. + + :copyright: Copyright 2008 by Armin Ronacher. + :license: Python License. 
+""" + + +from _ast import Add +from _ast import And +from _ast import AST +from _ast import BitAnd +from _ast import BitOr +from _ast import BitXor +from _ast import Div +from _ast import Eq +from _ast import FloorDiv +from _ast import Gt +from _ast import GtE +from _ast import If +from _ast import In +from _ast import Invert +from _ast import Is +from _ast import IsNot +from _ast import LShift +from _ast import Lt +from _ast import LtE +from _ast import Mod +from _ast import Mult +from _ast import Name +from _ast import Not +from _ast import NotEq +from _ast import NotIn +from _ast import Or +from _ast import PyCF_ONLY_AST +from _ast import RShift +from _ast import Sub +from _ast import UAdd +from _ast import USub + + +BOOLOP_SYMBOLS = {And: "and", Or: "or"} + +BINOP_SYMBOLS = { + Add: "+", + Sub: "-", + Mult: "*", + Div: "/", + FloorDiv: "//", + Mod: "%", + LShift: "<<", + RShift: ">>", + BitOr: "|", + BitAnd: "&", + BitXor: "^", +} + +CMPOP_SYMBOLS = { + Eq: "==", + Gt: ">", + GtE: ">=", + In: "in", + Is: "is", + IsNot: "is not", + Lt: "<", + LtE: "<=", + NotEq: "!=", + NotIn: "not in", +} + +UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"} + +ALL_SYMBOLS = {} +ALL_SYMBOLS.update(BOOLOP_SYMBOLS) +ALL_SYMBOLS.update(BINOP_SYMBOLS) +ALL_SYMBOLS.update(CMPOP_SYMBOLS) +ALL_SYMBOLS.update(UNARYOP_SYMBOLS) + + +def parse(expr, filename="", mode="exec"): + """Parse an expression into an AST node.""" + return compile(expr, filename, mode, PyCF_ONLY_AST) + + +def iter_fields(node): + """Iterate over all fields of a node, only yielding existing fields.""" + + for field in node._fields: + try: + yield field, getattr(node, field) + except AttributeError: + pass + + +class NodeVisitor: + + """ + Walks the abstract syntax tree and call visitor functions for every node + found. The visitor functions may return values which will be forwarded + by the `visit` method. + + Per default the visitor functions for the nodes are ``'visit_'`` + + class name of the node. So a `TryFinally` node visit function would + be `visit_TryFinally`. This behavior can be changed by overriding + the `get_visitor` function. If no visitor function exists for a node + (return value `None`) the `generic_visit` visitor is used instead. + + Don't use the `NodeVisitor` if you want to apply changes to nodes during + traversing. For this a special visitor exists (`NodeTransformer`) that + allows modifications. + """ + + def get_visitor(self, node): + """ + Return the visitor function for this node or `None` if no visitor + exists for this node. In that case the generic visit function is + used instead. + """ + method = "visit_" + node.__class__.__name__ + return getattr(self, method, None) + + def visit(self, node): + """Visit a node.""" + f = self.get_visitor(node) + if f is not None: + return f(node) + return self.generic_visit(node) + + def generic_visit(self, node): + """Called if no explicit visitor function exists for a node.""" + for field, value in iter_fields(node): + if isinstance(value, list): + for item in value: + if isinstance(item, AST): + self.visit(item) + elif isinstance(value, AST): + self.visit(value) + + +class NodeTransformer(NodeVisitor): + + """ + Walks the abstract syntax tree and allows modifications of nodes. + + The `NodeTransformer` will walk the AST and use the return value of the + visitor functions to replace or remove the old node. 
If the return + value of the visitor function is `None` the node will be removed + from the previous location otherwise it's replaced with the return + value. The return value may be the original node in which case no + replacement takes place. + + Here an example transformer that rewrites all `foo` to `data['foo']`:: + + class RewriteName(NodeTransformer): + + def visit_Name(self, node): + return copy_location(Subscript( + value=Name(id='data', ctx=Load()), + slice=Index(value=Str(s=node.id)), + ctx=node.ctx + ), node) + + Keep in mind that if the node you're operating on has child nodes + you must either transform the child nodes yourself or call the generic + visit function for the node first. + + Nodes that were part of a collection of statements (that applies to + all statement nodes) may also return a list of nodes rather than just + a single node. + + Usually you use the transformer like this:: + + node = YourTransformer().visit(node) + """ + + def generic_visit(self, node): + for field, old_value in iter_fields(node): + old_value = getattr(node, field, None) + if isinstance(old_value, list): + new_values = [] + for value in old_value: + if isinstance(value, AST): + value = self.visit(value) + if value is None: + continue + elif not isinstance(value, AST): + new_values.extend(value) + continue + new_values.append(value) + old_value[:] = new_values + elif isinstance(old_value, AST): + new_node = self.visit(old_value) + if new_node is None: + delattr(node, field) + else: + setattr(node, field, new_node) + return node + + +class SourceGenerator(NodeVisitor): + + """ + This visitor is able to transform a well formed syntax tree into python + sourcecode. For more details have a look at the docstring of the + `node_to_source` function. + """ + + def __init__(self, indent_with): + self.result = [] + self.indent_with = indent_with + self.indentation = 0 + self.new_lines = 0 + + def write(self, x): + if self.new_lines: + if self.result: + self.result.append("\n" * self.new_lines) + self.result.append(self.indent_with * self.indentation) + self.new_lines = 0 + self.result.append(x) + + def newline(self, n=1): + self.new_lines = max(self.new_lines, n) + + def body(self, statements): + self.new_line = True + self.indentation += 1 + for stmt in statements: + self.visit(stmt) + self.indentation -= 1 + + def body_or_else(self, node): + self.body(node.body) + if node.orelse: + self.newline() + self.write("else:") + self.body(node.orelse) + + def signature(self, node): + want_comma = [] + + def write_comma(): + if want_comma: + self.write(", ") + else: + want_comma.append(True) + + padding = [None] * (len(node.args) - len(node.defaults)) + for arg, default in zip(node.args, padding + node.defaults): + write_comma() + self.visit(arg) + if default is not None: + self.write("=") + self.visit(default) + if node.vararg is not None: + write_comma() + self.write("*" + node.vararg.arg) + if node.kwarg is not None: + write_comma() + self.write("**" + node.kwarg.arg) + + def decorators(self, node): + for decorator in node.decorator_list: + self.newline() + self.write("@") + self.visit(decorator) + + # Statements + + def visit_Assign(self, node): + self.newline() + for idx, target in enumerate(node.targets): + if idx: + self.write(", ") + self.visit(target) + self.write(" = ") + self.visit(node.value) + + def visit_AugAssign(self, node): + self.newline() + self.visit(node.target) + self.write(BINOP_SYMBOLS[type(node.op)] + "=") + self.visit(node.value) + + def visit_ImportFrom(self, node): + self.newline() + 
self.write("from %s%s import " % ("." * node.level, node.module)) + for idx, item in enumerate(node.names): + if idx: + self.write(", ") + self.write(item) + + def visit_Import(self, node): + self.newline() + for item in node.names: + self.write("import ") + self.visit(item) + + def visit_Expr(self, node): + self.newline() + self.generic_visit(node) + + def visit_FunctionDef(self, node): + self.newline(n=2) + self.decorators(node) + self.newline() + self.write("def %s(" % node.name) + self.signature(node.args) + self.write("):") + self.body(node.body) + + def visit_ClassDef(self, node): + have_args = [] + + def paren_or_comma(): + if have_args: + self.write(", ") + else: + have_args.append(True) + self.write("(") + + self.newline(n=3) + self.decorators(node) + self.newline() + self.write("class %s" % node.name) + for base in node.bases: + paren_or_comma() + self.visit(base) + # XXX: the if here is used to keep this module compatible + # with python 2.6. + if hasattr(node, "keywords"): + for keyword in node.keywords: + paren_or_comma() + self.write(keyword.arg + "=") + self.visit(keyword.value) + if getattr(node, "starargs", None): + paren_or_comma() + self.write("*") + self.visit(node.starargs) + if getattr(node, "kwargs", None): + paren_or_comma() + self.write("**") + self.visit(node.kwargs) + self.write(have_args and "):" or ":") + self.body(node.body) + + def visit_If(self, node): + self.newline() + self.write("if ") + self.visit(node.test) + self.write(":") + self.body(node.body) + while True: + else_ = node.orelse + if len(else_) == 1 and isinstance(else_[0], If): + node = else_[0] + self.newline() + self.write("elif ") + self.visit(node.test) + self.write(":") + self.body(node.body) + else: + self.newline() + self.write("else:") + self.body(else_) + break + + def visit_For(self, node): + self.newline() + self.write("for ") + self.visit(node.target) + self.write(" in ") + self.visit(node.iter) + self.write(":") + self.body_or_else(node) + + def visit_While(self, node): + self.newline() + self.write("while ") + self.visit(node.test) + self.write(":") + self.body_or_else(node) + + def visit_With(self, node): + self.newline() + self.write("with ") + self.visit(node.context_expr) + if node.optional_vars is not None: + self.write(" as ") + self.visit(node.optional_vars) + self.write(":") + self.body(node.body) + + def visit_Pass(self, node): + self.newline() + self.write("pass") + + def visit_Print(self, node): + # XXX: python 2.6 only + self.newline() + self.write("print ") + want_comma = False + if node.dest is not None: + self.write(" >> ") + self.visit(node.dest) + want_comma = True + for value in node.values: + if want_comma: + self.write(", ") + self.visit(value) + want_comma = True + if not node.nl: + self.write(",") + + def visit_Delete(self, node): + self.newline() + self.write("del ") + for idx, target in enumerate(node): + if idx: + self.write(", ") + self.visit(target) + + def visit_TryExcept(self, node): + self.newline() + self.write("try:") + self.body(node.body) + for handler in node.handlers: + self.visit(handler) + + def visit_TryFinally(self, node): + self.newline() + self.write("try:") + self.body(node.body) + self.newline() + self.write("finally:") + self.body(node.finalbody) + + def visit_Global(self, node): + self.newline() + self.write("global " + ", ".join(node.names)) + + def visit_Nonlocal(self, node): + self.newline() + self.write("nonlocal " + ", ".join(node.names)) + + def visit_Return(self, node): + self.newline() + self.write("return ") + 
self.visit(node.value) + + def visit_Break(self, node): + self.newline() + self.write("break") + + def visit_Continue(self, node): + self.newline() + self.write("continue") + + def visit_Raise(self, node): + # XXX: Python 2.6 / 3.0 compatibility + self.newline() + self.write("raise") + if hasattr(node, "exc") and node.exc is not None: + self.write(" ") + self.visit(node.exc) + if node.cause is not None: + self.write(" from ") + self.visit(node.cause) + elif hasattr(node, "type") and node.type is not None: + self.visit(node.type) + if node.inst is not None: + self.write(", ") + self.visit(node.inst) + if node.tback is not None: + self.write(", ") + self.visit(node.tback) + + # Expressions + + def visit_Attribute(self, node): + self.visit(node.value) + self.write("." + node.attr) + + def visit_Call(self, node): + want_comma = [] + + def write_comma(): + if want_comma: + self.write(", ") + else: + want_comma.append(True) + + self.visit(node.func) + self.write("(") + for arg in node.args: + write_comma() + self.visit(arg) + for keyword in node.keywords: + write_comma() + self.write(keyword.arg + "=") + self.visit(keyword.value) + if getattr(node, "starargs", None): + write_comma() + self.write("*") + self.visit(node.starargs) + if getattr(node, "kwargs", None): + write_comma() + self.write("**") + self.visit(node.kwargs) + self.write(")") + + def visit_Name(self, node): + self.write(node.id) + + def visit_NameConstant(self, node): + self.write(str(node.value)) + + def visit_arg(self, node): + self.write(node.arg) + + def visit_Str(self, node): + self.write(repr(node.s)) + + def visit_Bytes(self, node): + self.write(repr(node.s)) + + def visit_Num(self, node): + self.write(repr(node.n)) + + # newly needed in Python 3.8 + def visit_Constant(self, node): + self.write(repr(node.value)) + + def visit_Tuple(self, node): + self.write("(") + idx = -1 + for idx, item in enumerate(node.elts): + if idx: + self.write(", ") + self.visit(item) + self.write(idx and ")" or ",)") + + def sequence_visit(left, right): + def visit(self, node): + self.write(left) + for idx, item in enumerate(node.elts): + if idx: + self.write(", ") + self.visit(item) + self.write(right) + + return visit + + visit_List = sequence_visit("[", "]") + visit_Set = sequence_visit("{", "}") + del sequence_visit + + def visit_Dict(self, node): + self.write("{") + for idx, (key, value) in enumerate(zip(node.keys, node.values)): + if idx: + self.write(", ") + self.visit(key) + self.write(": ") + self.visit(value) + self.write("}") + + def visit_BinOp(self, node): + self.write("(") + self.visit(node.left) + self.write(" %s " % BINOP_SYMBOLS[type(node.op)]) + self.visit(node.right) + self.write(")") + + def visit_BoolOp(self, node): + self.write("(") + for idx, value in enumerate(node.values): + if idx: + self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)]) + self.visit(value) + self.write(")") + + def visit_Compare(self, node): + self.write("(") + self.visit(node.left) + for op, right in zip(node.ops, node.comparators): + self.write(" %s " % CMPOP_SYMBOLS[type(op)]) + self.visit(right) + self.write(")") + + def visit_UnaryOp(self, node): + self.write("(") + op = UNARYOP_SYMBOLS[type(node.op)] + self.write(op) + if op == "not": + self.write(" ") + self.visit(node.operand) + self.write(")") + + def visit_Subscript(self, node): + self.visit(node.value) + self.write("[") + self.visit(node.slice) + self.write("]") + + def visit_Slice(self, node): + if node.lower is not None: + self.visit(node.lower) + self.write(":") + if node.upper is not None: + 
self.visit(node.upper) + if node.step is not None: + self.write(":") + if not (isinstance(node.step, Name) and node.step.id == "None"): + self.visit(node.step) + + def visit_ExtSlice(self, node): + for idx, item in node.dims: + if idx: + self.write(", ") + self.visit(item) + + def visit_Yield(self, node): + self.write("yield ") + self.visit(node.value) + + def visit_Lambda(self, node): + self.write("lambda ") + self.signature(node.args) + self.write(": ") + self.visit(node.body) + + def visit_Ellipsis(self, node): + self.write("Ellipsis") + + def generator_visit(left, right): + def visit(self, node): + self.write(left) + self.visit(node.elt) + for comprehension in node.generators: + self.visit(comprehension) + self.write(right) + + return visit + + visit_ListComp = generator_visit("[", "]") + visit_GeneratorExp = generator_visit("(", ")") + visit_SetComp = generator_visit("{", "}") + del generator_visit + + def visit_DictComp(self, node): + self.write("{") + self.visit(node.key) + self.write(": ") + self.visit(node.value) + for comprehension in node.generators: + self.visit(comprehension) + self.write("}") + + def visit_IfExp(self, node): + self.visit(node.body) + self.write(" if ") + self.visit(node.test) + self.write(" else ") + self.visit(node.orelse) + + def visit_Starred(self, node): + self.write("*") + self.visit(node.value) + + def visit_Repr(self, node): + # XXX: python 2.6 only + self.write("`") + self.visit(node.value) + self.write("`") + + # Helper Nodes + + def visit_alias(self, node): + self.write(node.name) + if node.asname is not None: + self.write(" as " + node.asname) + + def visit_comprehension(self, node): + self.write(" for ") + self.visit(node.target) + self.write(" in ") + self.visit(node.iter) + if node.ifs: + for if_ in node.ifs: + self.write(" if ") + self.visit(if_) + + def visit_excepthandler(self, node): + self.newline() + self.write("except") + if node.type is not None: + self.write(" ") + self.visit(node.type) + if node.name is not None: + self.write(" as ") + self.visit(node.name) + self.write(":") + self.body(node.body) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ast.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ast.py new file mode 100644 index 0000000000000000000000000000000000000000..82cfa11665168ce08d4bd3fab195fbcffedf457b --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ast.py @@ -0,0 +1,202 @@ +# mako/ast.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""utilities for analyzing expressions and blocks of Python +code, as well as generating Python from AST nodes""" + +import re + +from mako import exceptions +from mako import pyparser + + +class PythonCode: + + """represents information about a string containing Python code""" + + def __init__(self, code, **exception_kwargs): + self.code = code + + # represents all identifiers which are assigned to at some point in + # the code + self.declared_identifiers = set() + + # represents all identifiers which are referenced before their + # assignment, if any + self.undeclared_identifiers = set() + + # note that an identifier can be in both the undeclared and declared + # lists. 
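To make that note concrete, here is a hypothetical probe of the ``PythonCode`` class being defined here; the ``source``/``lineno``/``pos``/``filename`` keywords are the usual exception-context arguments Mako threads through, and the commented results are expected values, not asserted ones::

    from mako.ast import PythonCode

    pc = PythonCode(
        "x = y\ny = 1",
        source="", lineno=1, pos=0, filename="<example>",
    )
    print(pc.declared_identifiers)    # {'x', 'y'}: both are assigned to
    print(pc.undeclared_identifiers)  # {'y'}: referenced before assignment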
+ + # using AST to parse instead of using code.co_varnames, + # code.co_names has several advantages: + # - we can locate an identifier as "undeclared" even if + # its declared later in the same block of code + # - AST is less likely to break with version changes + # (for example, the behavior of co_names changed a little bit + # in python version 2.5) + if isinstance(code, str): + expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs) + else: + expr = code + + f = pyparser.FindIdentifiers(self, **exception_kwargs) + f.visit(expr) + + +class ArgumentList: + + """parses a fragment of code as a comma-separated list of expressions""" + + def __init__(self, code, **exception_kwargs): + self.codeargs = [] + self.args = [] + self.declared_identifiers = set() + self.undeclared_identifiers = set() + if isinstance(code, str): + if re.match(r"\S", code) and not re.match(r",\s*$", code): + # if theres text and no trailing comma, insure its parsed + # as a tuple by adding a trailing comma + code += "," + expr = pyparser.parse(code, "exec", **exception_kwargs) + else: + expr = code + + f = pyparser.FindTuple(self, PythonCode, **exception_kwargs) + f.visit(expr) + + +class PythonFragment(PythonCode): + + """extends PythonCode to provide identifier lookups in partial control + statements + + e.g.:: + + for x in 5: + elif y==9: + except (MyException, e): + + """ + + def __init__(self, code, **exception_kwargs): + m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S) + if not m: + raise exceptions.CompileException( + "Fragment '%s' is not a partial control statement" % code, + **exception_kwargs, + ) + if m.group(3): + code = code[: m.start(3)] + (keyword, expr) = m.group(1, 2) + if keyword in ["for", "if", "while"]: + code = code + "pass" + elif keyword == "try": + code = code + "pass\nexcept:pass" + elif keyword in ["elif", "else"]: + code = "if False:pass\n" + code + "pass" + elif keyword == "except": + code = "try:pass\n" + code + "pass" + elif keyword == "with": + code = code + "pass" + else: + raise exceptions.CompileException( + "Unsupported control keyword: '%s'" % keyword, + **exception_kwargs, + ) + super().__init__(code, **exception_kwargs) + + +class FunctionDecl: + + """function declaration""" + + def __init__(self, code, allow_kwargs=True, **exception_kwargs): + self.code = code + expr = pyparser.parse(code, "exec", **exception_kwargs) + + f = pyparser.ParseFunc(self, **exception_kwargs) + f.visit(expr) + if not hasattr(self, "funcname"): + raise exceptions.CompileException( + "Code '%s' is not a function declaration" % code, + **exception_kwargs, + ) + if not allow_kwargs and self.kwargs: + raise exceptions.CompileException( + "'**%s' keyword argument not allowed here" + % self.kwargnames[-1], + **exception_kwargs, + ) + + def get_argument_expressions(self, as_call=False): + """Return the argument declarations of this FunctionDecl as a printable + list. + + By default the return value is appropriate for writing in a ``def``; + set `as_call` to true to build arguments to be passed to the function + instead (assuming locals with the same names as the arguments exist). 
+ """ + + namedecls = [] + + # Build in reverse order, since defaults and slurpy args come last + argnames = self.argnames[::-1] + kwargnames = self.kwargnames[::-1] + defaults = self.defaults[::-1] + kwdefaults = self.kwdefaults[::-1] + + # Named arguments + if self.kwargs: + namedecls.append("**" + kwargnames.pop(0)) + + for name in kwargnames: + # Keyword-only arguments must always be used by name, so even if + # this is a call, print out `foo=foo` + if as_call: + namedecls.append("%s=%s" % (name, name)) + elif kwdefaults: + default = kwdefaults.pop(0) + if default is None: + # The AST always gives kwargs a default, since you can do + # `def foo(*, a=1, b, c=3)` + namedecls.append(name) + else: + namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) + else: + namedecls.append(name) + + # Positional arguments + if self.varargs: + namedecls.append("*" + argnames.pop(0)) + + for name in argnames: + if as_call or not defaults: + namedecls.append(name) + else: + default = defaults.pop(0) + namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) + + namedecls.reverse() + return namedecls + + @property + def allargnames(self): + return tuple(self.argnames) + tuple(self.kwargnames) + + +class FunctionArgs(FunctionDecl): + + """the argument portion of a function declaration""" + + def __init__(self, code, **kwargs): + super().__init__("def ANON(%s):pass" % code, **kwargs) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/cache.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/cache.py new file mode 100644 index 0000000000000000000000000000000000000000..267411c33a38a176af34ac3dcf356c140d67a2bb --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/cache.py @@ -0,0 +1,239 @@ +# mako/cache.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from mako import util + +_cache_plugins = util.PluginLoader("mako.cache") + +register_plugin = _cache_plugins.register +register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl") + + +class Cache: + + """Represents a data content cache made available to the module + space of a specific :class:`.Template` object. + + .. versionadded:: 0.6 + :class:`.Cache` by itself is mostly a + container for a :class:`.CacheImpl` object, which implements + a fixed API to provide caching services; specific subclasses exist to + implement different + caching strategies. Mako includes a backend that works with + the Beaker caching system. Beaker itself then supports + a number of backends (i.e. file, memory, memcached, etc.) + + The construction of a :class:`.Cache` is part of the mechanics + of a :class:`.Template`, and programmatic access to this + cache is typically via the :attr:`.Template.cache` attribute. + + """ + + impl = None + """Provide the :class:`.CacheImpl` in use by this :class:`.Cache`. + + This accessor allows a :class:`.CacheImpl` with additional + methods beyond that of :class:`.Cache` to be used programmatically. + + """ + + id = None + """Return the 'id' that identifies this cache. + + This is a value that should be globally unique to the + :class:`.Template` associated with this cache, and can + be used by a caching system to name a local container + for data specific to this template. 
+ + """ + + starttime = None + """Epochal time value for when the owning :class:`.Template` was + first compiled. + + A cache implementation may wish to invalidate data earlier than + this timestamp; this has the effect of the cache for a specific + :class:`.Template` starting clean any time the :class:`.Template` + is recompiled, such as when the original template file changed on + the filesystem. + + """ + + def __init__(self, template, *args): + # check for a stale template calling the + # constructor + if isinstance(template, str) and args: + return + self.template = template + self.id = template.module.__name__ + self.starttime = template.module._modified_time + self._def_regions = {} + self.impl = self._load_impl(self.template.cache_impl) + + def _load_impl(self, name): + return _cache_plugins.load(name)(self) + + def get_or_create(self, key, creation_function, **kw): + """Retrieve a value from the cache, using the given creation function + to generate a new value.""" + + return self._ctx_get_or_create(key, creation_function, None, **kw) + + def _ctx_get_or_create(self, key, creation_function, context, **kw): + """Retrieve a value from the cache, using the given creation function + to generate a new value.""" + + if not self.template.cache_enabled: + return creation_function() + + return self.impl.get_or_create( + key, creation_function, **self._get_cache_kw(kw, context) + ) + + def set(self, key, value, **kw): + r"""Place a value in the cache. + + :param key: the value's key. + :param value: the value. + :param \**kw: cache configuration arguments. + + """ + + self.impl.set(key, value, **self._get_cache_kw(kw, None)) + + put = set + """A synonym for :meth:`.Cache.set`. + + This is here for backwards compatibility. + + """ + + def get(self, key, **kw): + r"""Retrieve a value from the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. The + backend is configured using these arguments upon first request. + Subsequent requests that use the same series of configuration + values will use that same backend. + + """ + return self.impl.get(key, **self._get_cache_kw(kw, None)) + + def invalidate(self, key, **kw): + r"""Invalidate a value in the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. The + backend is configured using these arguments upon first request. + Subsequent requests that use the same series of configuration + values will use that same backend. + + """ + self.impl.invalidate(key, **self._get_cache_kw(kw, None)) + + def invalidate_body(self): + """Invalidate the cached content of the "body" method for this + template. + + """ + self.invalidate("render_body", __M_defname="render_body") + + def invalidate_def(self, name): + """Invalidate the cached content of a particular ``<%def>`` within this + template. + + """ + + self.invalidate("render_%s" % name, __M_defname="render_%s" % name) + + def invalidate_closure(self, name): + """Invalidate a nested ``<%def>`` within this template. + + Caching of nested defs is a blunt tool as there is no + management of scope -- nested defs that use cache tags + need to have names unique of all other nested defs in the + template, else their content will be overwritten by + each other. 
+ + """ + + self.invalidate(name, __M_defname=name) + + def _get_cache_kw(self, kw, context): + defname = kw.pop("__M_defname", None) + if not defname: + tmpl_kw = self.template.cache_args.copy() + tmpl_kw.update(kw) + elif defname in self._def_regions: + tmpl_kw = self._def_regions[defname] + else: + tmpl_kw = self.template.cache_args.copy() + tmpl_kw.update(kw) + self._def_regions[defname] = tmpl_kw + if context and self.impl.pass_context: + tmpl_kw = tmpl_kw.copy() + tmpl_kw.setdefault("context", context) + return tmpl_kw + + +class CacheImpl: + + """Provide a cache implementation for use by :class:`.Cache`.""" + + def __init__(self, cache): + self.cache = cache + + pass_context = False + """If ``True``, the :class:`.Context` will be passed to + :meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``. + """ + + def get_or_create(self, key, creation_function, **kw): + r"""Retrieve a value from the cache, using the given creation function + to generate a new value. + + This function *must* return a value, either from + the cache, or via the given creation function. + If the creation function is called, the newly + created value should be populated into the cache + under the given key before being returned. + + :param key: the value's key. + :param creation_function: function that when called generates + a new value. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def set(self, key, value, **kw): + r"""Place a value in the cache. + + :param key: the value's key. + :param value: the value. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def get(self, key, **kw): + r"""Retrieve a value from the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def invalidate(self, key, **kw): + r"""Invalidate a value in the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/cmd.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/cmd.py new file mode 100644 index 0000000000000000000000000000000000000000..3fff197ba76b91819825ba38ed840a4e03d1ba73 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/cmd.py @@ -0,0 +1,99 @@ +# mako/cmd.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from argparse import ArgumentParser +from os.path import dirname +from os.path import isfile +import sys + +from mako import exceptions +from mako.lookup import TemplateLookup +from mako.template import Template + + +def varsplit(var): + if "=" not in var: + return (var, "") + return var.split("=", 1) + + +def _exit(): + sys.stderr.write(exceptions.text_error_template().render()) + sys.exit(1) + + +def cmdline(argv=None): + parser = ArgumentParser() + parser.add_argument( + "--var", + default=[], + action="append", + help="variable (can be used multiple times, use name=value)", + ) + parser.add_argument( + "--template-dir", + default=[], + action="append", + help="Directory to use for template lookup (multiple " + "directories may be provided). 
If not given then if the " + "template is read from stdin, the value defaults to be " + "the current directory, otherwise it defaults to be the " + "parent directory of the file provided.", + ) + parser.add_argument( + "--output-encoding", default=None, help="force output encoding" + ) + parser.add_argument( + "--output-file", + default=None, + help="Write to file upon successful render instead of stdout", + ) + parser.add_argument("input", nargs="?", default="-") + + options = parser.parse_args(argv) + + output_encoding = options.output_encoding + output_file = options.output_file + + if options.input == "-": + lookup_dirs = options.template_dir or ["."] + lookup = TemplateLookup(lookup_dirs) + try: + template = Template( + sys.stdin.read(), + lookup=lookup, + output_encoding=output_encoding, + ) + except: + _exit() + else: + filename = options.input + if not isfile(filename): + raise SystemExit("error: can't find %s" % filename) + lookup_dirs = options.template_dir or [dirname(filename)] + lookup = TemplateLookup(lookup_dirs) + try: + template = Template( + filename=filename, + lookup=lookup, + output_encoding=output_encoding, + ) + except: + _exit() + + kw = dict(varsplit(var) for var in options.var) + try: + rendered = template.render(**kw) + except: + _exit() + else: + if output_file: + open(output_file, "wt", encoding=output_encoding).write(rendered) + else: + sys.stdout.write(rendered) + + +if __name__ == "__main__": + cmdline() diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/codegen.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/codegen.py new file mode 100644 index 0000000000000000000000000000000000000000..b5293f4757f1216d0b87013e9b3d52b9d0d167ae --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/codegen.py @@ -0,0 +1,1319 @@ +# mako/codegen.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides functionality for rendering a parsetree constructing into module +source code.""" + +import json +import re +import time + +from mako import ast +from mako import exceptions +from mako import filters +from mako import parsetree +from mako import util +from mako.pygen import PythonPrinter + + +MAGIC_NUMBER = 10 + +# names which are hardwired into the +# template and are not accessed via the +# context itself +TOPLEVEL_DECLARED = {"UNDEFINED", "STOP_RENDERING"} +RESERVED_NAMES = {"context", "loop"}.union(TOPLEVEL_DECLARED) + + +def compile( # noqa + node, + uri, + filename=None, + default_filters=None, + buffer_filters=None, + imports=None, + future_imports=None, + source_encoding=None, + generate_magic_comment=True, + strict_undefined=False, + enable_loop=True, + reserved_names=frozenset(), +): + """Generate module source code given a parsetree node, + uri, and optional source filename""" + + buf = util.FastEncodingBuffer() + + printer = PythonPrinter(buf) + _GenerateRenderMethod( + printer, + _CompileContext( + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + strict_undefined, + enable_loop, + reserved_names, + ), + node, + ) + return buf.getvalue() + + +class _CompileContext: + def __init__( + self, + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, 
+ strict_undefined, + enable_loop, + reserved_names, + ): + self.uri = uri + self.filename = filename + self.default_filters = default_filters + self.buffer_filters = buffer_filters + self.imports = imports + self.future_imports = future_imports + self.source_encoding = source_encoding + self.generate_magic_comment = generate_magic_comment + self.strict_undefined = strict_undefined + self.enable_loop = enable_loop + self.reserved_names = reserved_names + + +class _GenerateRenderMethod: + + """A template visitor object which generates the + full module source for a template. + + """ + + def __init__(self, printer, compiler, node): + self.printer = printer + self.compiler = compiler + self.node = node + self.identifier_stack = [None] + self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag)) + + if self.in_def: + name = "render_%s" % node.funcname + args = node.get_argument_expressions() + filtered = len(node.filter_args.args) > 0 + buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) + defs = None + pagetag = None + if node.is_block and not node.is_anonymous: + args += ["**pageargs"] + else: + defs = self.write_toplevel() + pagetag = self.compiler.pagetag + name = "render_body" + if pagetag is not None: + args = pagetag.body_decl.get_argument_expressions() + if not pagetag.body_decl.kwargs: + args += ["**pageargs"] + cached = eval(pagetag.attributes.get("cached", "False")) + self.compiler.enable_loop = self.compiler.enable_loop or eval( + pagetag.attributes.get("enable_loop", "False") + ) + else: + args = ["**pageargs"] + cached = False + buffered = filtered = False + if args is None: + args = ["context"] + else: + args = [a for a in ["context"] + args] + + self.write_render_callable( + pagetag or node, name, args, buffered, filtered, cached + ) + + if defs is not None: + for node in defs: + _GenerateRenderMethod(printer, compiler, node) + + if not self.in_def: + self.write_metadata_struct() + + def write_metadata_struct(self): + self.printer.source_map[self.printer.lineno] = max( + self.printer.source_map + ) + struct = { + "filename": self.compiler.filename, + "uri": self.compiler.uri, + "source_encoding": self.compiler.source_encoding, + "line_map": self.printer.source_map, + } + self.printer.writelines( + '"""', + "__M_BEGIN_METADATA", + json.dumps(struct), + "__M_END_METADATA\n" '"""', + ) + + @property + def identifiers(self): + return self.identifier_stack[-1] + + def write_toplevel(self): + """Traverse a template structure for module-level directives and + generate the start of module-level code. 
+ + """ + inherit = [] + namespaces = {} + module_code = [] + + self.compiler.pagetag = None + + class FindTopLevel: + def visitInheritTag(s, node): + inherit.append(node) + + def visitNamespaceTag(s, node): + namespaces[node.name] = node + + def visitPageTag(s, node): + self.compiler.pagetag = node + + def visitCode(s, node): + if node.ismodule: + module_code.append(node) + + f = FindTopLevel() + for n in self.node.nodes: + n.accept_visitor(f) + + self.compiler.namespaces = namespaces + + module_ident = set() + for n in module_code: + module_ident = module_ident.union(n.declared_identifiers()) + + module_identifiers = _Identifiers(self.compiler) + module_identifiers.declared = module_ident + + # module-level names, python code + if ( + self.compiler.generate_magic_comment + and self.compiler.source_encoding + ): + self.printer.writeline( + "# -*- coding:%s -*-" % self.compiler.source_encoding + ) + + if self.compiler.future_imports: + self.printer.writeline( + "from __future__ import %s" + % (", ".join(self.compiler.future_imports),) + ) + self.printer.writeline("from mako import runtime, filters, cache") + self.printer.writeline("UNDEFINED = runtime.UNDEFINED") + self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING") + self.printer.writeline("__M_dict_builtin = dict") + self.printer.writeline("__M_locals_builtin = locals") + self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER) + self.printer.writeline("_modified_time = %r" % time.time()) + self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop) + self.printer.writeline( + "_template_filename = %r" % self.compiler.filename + ) + self.printer.writeline("_template_uri = %r" % self.compiler.uri) + self.printer.writeline( + "_source_encoding = %r" % self.compiler.source_encoding + ) + if self.compiler.imports: + buf = "" + for imp in self.compiler.imports: + buf += imp + "\n" + self.printer.writeline(imp) + impcode = ast.PythonCode( + buf, + source="", + lineno=0, + pos=0, + filename="template defined imports", + ) + else: + impcode = None + + main_identifiers = module_identifiers.branch(self.node) + mit = module_identifiers.topleveldefs + module_identifiers.topleveldefs = mit.union( + main_identifiers.topleveldefs + ) + module_identifiers.declared.update(TOPLEVEL_DECLARED) + if impcode: + module_identifiers.declared.update(impcode.declared_identifiers) + + self.compiler.identifiers = module_identifiers + self.printer.writeline( + "_exports = %r" + % [n.name for n in main_identifiers.topleveldefs.values()] + ) + self.printer.write_blanks(2) + + if len(module_code): + self.write_module_code(module_code) + + if len(inherit): + self.write_namespaces(namespaces) + self.write_inherit(inherit[-1]) + elif len(namespaces): + self.write_namespaces(namespaces) + + return list(main_identifiers.topleveldefs.values()) + + def write_render_callable( + self, node, name, args, buffered, filtered, cached + ): + """write a top-level render callable. 
+ + this could be the main render() method or that of a top-level def.""" + + if self.in_def: + decorator = node.decorator + if decorator: + self.printer.writeline( + "@runtime._decorate_toplevel(%s)" % decorator + ) + + self.printer.start_source(node.lineno) + self.printer.writelines( + "def %s(%s):" % (name, ",".join(args)), + # push new frame, assign current frame to __M_caller + "__M_caller = context.caller_stack._push_frame()", + "try:", + ) + if buffered or filtered or cached: + self.printer.writeline("context._push_buffer()") + + self.identifier_stack.append( + self.compiler.identifiers.branch(self.node) + ) + if (not self.in_def or self.node.is_block) and "**pageargs" in args: + self.identifier_stack[-1].argument_declared.add("pageargs") + + if not self.in_def and ( + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 + ): + self.printer.writeline( + "__M_locals = __M_dict_builtin(%s)" + % ",".join( + [ + "%s=%s" % (x, x) + for x in self.identifiers.argument_declared + ] + ) + ) + + self.write_variable_declares(self.identifiers, toplevel=True) + + for n in self.node.nodes: + n.accept_visitor(self) + + self.write_def_finish(self.node, buffered, filtered, cached) + self.printer.writeline(None) + self.printer.write_blanks(2) + if cached: + self.write_cache_decorator( + node, name, args, buffered, self.identifiers, toplevel=True + ) + + def write_module_code(self, module_code): + """write module-level template code, i.e. that which + is enclosed in <%! %> tags in the template.""" + for n in module_code: + self.printer.write_indented_block(n.text, starting_lineno=n.lineno) + + def write_inherit(self, node): + """write the module-level inheritance-determination callable.""" + + self.printer.writelines( + "def _mako_inherit(template, context):", + "_mako_generate_namespaces(context)", + "return runtime._inherit_from(context, %s, _template_uri)" + % (node.parsed_attributes["file"]), + None, + ) + + def write_namespaces(self, namespaces): + """write the module-level namespace-generating callable.""" + self.printer.writelines( + "def _mako_get_namespace(context, name):", + "try:", + "return context.namespaces[(__name__, name)]", + "except KeyError:", + "_mako_generate_namespaces(context)", + "return context.namespaces[(__name__, name)]", + None, + None, + ) + self.printer.writeline("def _mako_generate_namespaces(context):") + + for node in namespaces.values(): + if "import" in node.attributes: + self.compiler.has_ns_imports = True + self.printer.start_source(node.lineno) + if len(node.nodes): + self.printer.writeline("def make_namespace():") + export = [] + identifiers = self.compiler.identifiers.branch(node) + self.in_def = True + + class NSDefVisitor: + def visitDefTag(s, node): + s.visitDefOrBase(node) + + def visitBlockTag(s, node): + s.visitDefOrBase(node) + + def visitDefOrBase(s, node): + if node.is_anonymous: + raise exceptions.CompileException( + "Can't put anonymous blocks inside " + "<%namespace>", + **node.exception_kwargs, + ) + self.write_inline_def(node, identifiers, nested=False) + export.append(node.funcname) + + vis = NSDefVisitor() + for n in node.nodes: + n.accept_visitor(vis) + self.printer.writeline("return [%s]" % (",".join(export))) + self.printer.writeline(None) + self.in_def = False + callable_name = "make_namespace()" + else: + callable_name = "None" + + if "file" in node.parsed_attributes: + self.printer.writeline( + "ns = runtime.TemplateNamespace(%r," + " context._clean_inheritance_tokens()," + " templateuri=%s, 
callables=%s, " + " calling_uri=_template_uri)" + % ( + node.name, + node.parsed_attributes.get("file", "None"), + callable_name, + ) + ) + elif "module" in node.parsed_attributes: + self.printer.writeline( + "ns = runtime.ModuleNamespace(%r," + " context._clean_inheritance_tokens()," + " callables=%s, calling_uri=_template_uri," + " module=%s)" + % ( + node.name, + callable_name, + node.parsed_attributes.get("module", "None"), + ) + ) + else: + self.printer.writeline( + "ns = runtime.Namespace(%r," + " context._clean_inheritance_tokens()," + " callables=%s, calling_uri=_template_uri)" + % (node.name, callable_name) + ) + if eval(node.attributes.get("inheritable", "False")): + self.printer.writeline("context['self'].%s = ns" % (node.name)) + + self.printer.writeline( + "context.namespaces[(__name__, %s)] = ns" % repr(node.name) + ) + self.printer.write_blanks(1) + if not len(namespaces): + self.printer.writeline("pass") + self.printer.writeline(None) + + def write_variable_declares(self, identifiers, toplevel=False, limit=None): + """write variable declarations at the top of a function. + + the variable declarations are in the form of callable + definitions for defs and/or name lookup within the + function's context argument. the names declared are based + on the names that are referenced in the function body, + which don't otherwise have any explicit assignment + operation. names that are assigned within the body are + assumed to be locally-scoped variables and are not + separately declared. + + for def callable definitions, if the def is a top-level + callable then a 'stub' callable is generated which wraps + the current Context into a closure. if the def is not + top-level, it is fully rendered as a local closure. + + """ + + # collection of all defs available to us in this scope + comp_idents = {c.funcname: c for c in identifiers.defs} + to_write = set() + + # write "context.get()" for all variables we are going to + # need that arent in the namespace yet + to_write = to_write.union(identifiers.undeclared) + + # write closure functions for closures that we define + # right here + to_write = to_write.union( + [c.funcname for c in identifiers.closuredefs.values()] + ) + + # remove identifiers that are declared in the argument + # signature of the callable + to_write = to_write.difference(identifiers.argument_declared) + + # remove identifiers that we are going to assign to. + # in this way we mimic Python's behavior, + # i.e. assignment to a variable within a block + # means that variable is now a "locally declared" var, + # which cannot be referenced beforehand. + to_write = to_write.difference(identifiers.locally_declared) + + if self.compiler.enable_loop: + has_loop = "loop" in to_write + to_write.discard("loop") + else: + has_loop = False + + # if a limiting set was sent, constraint to those items in that list + # (this is used for the caching decorator) + if limit is not None: + to_write = to_write.intersection(limit) + + if toplevel and getattr(self.compiler, "has_ns_imports", False): + self.printer.writeline("_import_ns = {}") + self.compiler.has_imports = True + for ident, ns in self.compiler.namespaces.items(): + if "import" in ns.attributes: + self.printer.writeline( + "_mako_get_namespace(context, %r)." 
+ "_populate(_import_ns, %r)" + % ( + ident, + re.split(r"\s*,\s*", ns.attributes["import"]), + ) + ) + + if has_loop: + self.printer.writeline("loop = __M_loop = runtime.LoopStack()") + + for ident in to_write: + if ident in comp_idents: + comp = comp_idents[ident] + if comp.is_block: + if not comp.is_anonymous: + self.write_def_decl(comp, identifiers) + else: + self.write_inline_def(comp, identifiers, nested=True) + else: + if comp.is_root(): + self.write_def_decl(comp, identifiers) + else: + self.write_inline_def(comp, identifiers, nested=True) + + elif ident in self.compiler.namespaces: + self.printer.writeline( + "%s = _mako_get_namespace(context, %r)" % (ident, ident) + ) + else: + if getattr(self.compiler, "has_ns_imports", False): + if self.compiler.strict_undefined: + self.printer.writelines( + "%s = _import_ns.get(%r, UNDEFINED)" + % (ident, ident), + "if %s is UNDEFINED:" % ident, + "try:", + "%s = context[%r]" % (ident, ident), + "except KeyError:", + "raise NameError(\"'%s' is not defined\")" % ident, + None, + None, + ) + else: + self.printer.writeline( + "%s = _import_ns.get" + "(%r, context.get(%r, UNDEFINED))" + % (ident, ident, ident) + ) + else: + if self.compiler.strict_undefined: + self.printer.writelines( + "try:", + "%s = context[%r]" % (ident, ident), + "except KeyError:", + "raise NameError(\"'%s' is not defined\")" % ident, + None, + ) + else: + self.printer.writeline( + "%s = context.get(%r, UNDEFINED)" % (ident, ident) + ) + + self.printer.writeline("__M_writer = context.writer()") + + def write_def_decl(self, node, identifiers): + """write a locally-available callable referencing a top-level def""" + funcname = node.funcname + namedecls = node.get_argument_expressions() + nameargs = node.get_argument_expressions(as_call=True) + + if not self.in_def and ( + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 + ): + nameargs.insert(0, "context._locals(__M_locals)") + else: + nameargs.insert(0, "context") + self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls))) + self.printer.writeline( + "return render_%s(%s)" % (funcname, ",".join(nameargs)) + ) + self.printer.writeline(None) + + def write_inline_def(self, node, identifiers, nested): + """write a locally-available def callable inside an enclosing def.""" + + namedecls = node.get_argument_expressions() + + decorator = node.decorator + if decorator: + self.printer.writeline( + "@runtime._decorate_inline(context, %s)" % decorator + ) + self.printer.writeline( + "def %s(%s):" % (node.funcname, ",".join(namedecls)) + ) + filtered = len(node.filter_args.args) > 0 + buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) + self.printer.writelines( + # push new frame, assign current frame to __M_caller + "__M_caller = context.caller_stack._push_frame()", + "try:", + ) + if buffered or filtered or cached: + self.printer.writelines("context._push_buffer()") + + identifiers = identifiers.branch(node, nested=nested) + + self.write_variable_declares(identifiers) + + self.identifier_stack.append(identifiers) + for n in node.nodes: + n.accept_visitor(self) + self.identifier_stack.pop() + + self.write_def_finish(node, buffered, filtered, cached) + self.printer.writeline(None) + if cached: + self.write_cache_decorator( + node, + node.funcname, + namedecls, + False, + identifiers, + inline=True, + toplevel=False, + ) + + def write_def_finish( + self, node, buffered, filtered, cached, callstack=True + ): + """write 
the end section of a rendering function, either outermost or
+        inline.
+
+        this takes into account if the rendering function was filtered,
+        buffered, etc. and closes the corresponding try: block if any, and
+        writes code to retrieve captured content, apply filters, send proper
+        return value."""
+
+        if not buffered and not cached and not filtered:
+            self.printer.writeline("return ''")
+            if callstack:
+                self.printer.writelines(
+                    "finally:", "context.caller_stack._pop_frame()", None
+                )
+
+        if buffered or filtered or cached:
+            if buffered or cached:
+                # in a caching scenario, don't try to get a writer
+                # from the context after popping; assume the caching
+                # implementation might be using a context with no
+                # extra buffers
+                self.printer.writelines(
+                    "finally:", "__M_buf = context._pop_buffer()"
+                )
+            else:
+                self.printer.writelines(
+                    "finally:",
+                    "__M_buf, __M_writer = context._pop_buffer_and_writer()",
+                )
+
+            if callstack:
+                self.printer.writeline("context.caller_stack._pop_frame()")
+
+            s = "__M_buf.getvalue()"
+            if filtered:
+                s = self.create_filter_callable(
+                    node.filter_args.args, s, False
+                )
+            self.printer.writeline(None)
+            if buffered and not cached:
+                s = self.create_filter_callable(
+                    self.compiler.buffer_filters, s, False
+                )
+            if buffered or cached:
+                self.printer.writeline("return %s" % s)
+            else:
+                self.printer.writelines("__M_writer(%s)" % s, "return ''")
+
+    def write_cache_decorator(
+        self,
+        node_or_pagetag,
+        name,
+        args,
+        buffered,
+        identifiers,
+        inline=False,
+        toplevel=False,
+    ):
+        """write a post-function decorator to replace a rendering
+        callable with a cached version of itself."""
+
+        self.printer.writeline("__M_%s = %s" % (name, name))
+        cachekey = node_or_pagetag.parsed_attributes.get(
+            "cache_key", repr(name)
+        )
+
+        cache_args = {}
+        if self.compiler.pagetag is not None:
+            cache_args.update(
+                (pa[6:], self.compiler.pagetag.parsed_attributes[pa])
+                for pa in self.compiler.pagetag.parsed_attributes
+                if pa.startswith("cache_") and pa != "cache_key"
+            )
+        cache_args.update(
+            (pa[6:], node_or_pagetag.parsed_attributes[pa])
+            for pa in node_or_pagetag.parsed_attributes
+            if pa.startswith("cache_") and pa != "cache_key"
+        )
+        if "timeout" in cache_args:
+            cache_args["timeout"] = int(eval(cache_args["timeout"]))
+
+        self.printer.writeline("def %s(%s):" % (name, ",".join(args)))
+
+        # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
+        pass_args = [
+            "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args
+        ]
+
+        self.write_variable_declares(
+            identifiers,
+            toplevel=toplevel,
+            limit=node_or_pagetag.undeclared_identifiers(),
+        )
+        if buffered:
+            s = (
+                "context.get('local')."
+                "cache._ctx_get_or_create("
+                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)"
+                % (
+                    cachekey,
+                    name,
+                    ",".join(pass_args),
+                    "".join(
+                        ["%s=%s, " % (k, v) for k, v in cache_args.items()]
+                    ),
+                    name,
+                )
+            )
+            # apply buffer_filters
+            s = self.create_filter_callable(
+                self.compiler.buffer_filters, s, False
+            )
+            self.printer.writelines("return " + s, None)
+        else:
+            self.printer.writelines(
+                "__M_writer(context.get('local')."
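+                # generated-code sketch: the original callable was saved
+                # above as __M_<name>; the replacement emitted here fetches
+                # the cached value via cache._ctx_get_or_create(),
+                # re-rendering through __M_<name> on a miss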
+ "cache._ctx_get_or_create(" + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" + % ( + cachekey, + name, + ",".join(pass_args), + "".join( + ["%s=%s, " % (k, v) for k, v in cache_args.items()] + ), + name, + ), + "return ''", + None, + ) + + def create_filter_callable(self, args, target, is_expression): + """write a filter-applying expression based on the filters + present in the given filter names, adjusting for the global + 'default' filter aliases as needed.""" + + def locate_encode(name): + if re.match(r"decode\..+", name): + return "filters." + name + else: + return filters.DEFAULT_ESCAPES.get(name, name) + + if "n" not in args: + if is_expression: + if self.compiler.pagetag: + args = self.compiler.pagetag.filter_args.args + args + if self.compiler.default_filters and "n" not in args: + args = self.compiler.default_filters + args + for e in args: + # if filter given as a function, get just the identifier portion + if e == "n": + continue + m = re.match(r"(.+?)(\(.*\))", e) + if m: + ident, fargs = m.group(1, 2) + f = locate_encode(ident) + e = f + fargs + else: + e = locate_encode(e) + assert e is not None + target = "%s(%s)" % (e, target) + return target + + def visitExpression(self, node): + self.printer.start_source(node.lineno) + if ( + len(node.escapes) + or ( + self.compiler.pagetag is not None + and len(self.compiler.pagetag.filter_args.args) + ) + or len(self.compiler.default_filters) + ): + s = self.create_filter_callable( + node.escapes_code.args, "%s" % node.text, True + ) + self.printer.writeline("__M_writer(%s)" % s) + else: + self.printer.writeline("__M_writer(%s)" % node.text) + + def visitControlLine(self, node): + if node.isend: + self.printer.writeline(None) + if node.has_loop_context: + self.printer.writeline("finally:") + self.printer.writeline("loop = __M_loop._exit()") + self.printer.writeline(None) + else: + self.printer.start_source(node.lineno) + if self.compiler.enable_loop and node.keyword == "for": + text = mangle_mako_loop(node, self.printer) + else: + text = node.text + self.printer.writeline(text) + children = node.get_children() + + # this covers the four situations where we want to insert a pass: + # 1) a ternary control line with no children, + # 2) a primary control line with nothing but its own ternary + # and end control lines, and + # 3) any control line with no content other than comments + # 4) the first control block with no content other than comments + def _search_for_control_line(): + for c in children: + if isinstance(c, parsetree.Comment): + continue + elif isinstance(c, parsetree.ControlLine): + return True + return False + + if ( + not children + or all( + isinstance(c, (parsetree.Comment, parsetree.ControlLine)) + for c in children + ) + and all( + (node.is_ternary(c.keyword) or c.isend) + for c in children + if isinstance(c, parsetree.ControlLine) + ) + or _search_for_control_line() + ): + self.printer.writeline("pass") + + def visitText(self, node): + self.printer.start_source(node.lineno) + self.printer.writeline("__M_writer(%s)" % repr(node.content)) + + def visitTextTag(self, node): + filtered = len(node.filter_args.args) > 0 + if filtered: + self.printer.writelines( + "__M_writer = context._push_writer()", "try:" + ) + for n in node.nodes: + n.accept_visitor(self) + if filtered: + self.printer.writelines( + "finally:", + "__M_buf, __M_writer = context._pop_buffer_and_writer()", + "__M_writer(%s)" + % self.create_filter_callable( + node.filter_args.args, "__M_buf.getvalue()", False + ), + None, + ) + + def visitCode(self, 
node): + if not node.ismodule: + self.printer.write_indented_block( + node.text, starting_lineno=node.lineno + ) + + if not self.in_def and len(self.identifiers.locally_assigned) > 0: + # if we are the "template" def, fudge locally + # declared/modified variables into the "__M_locals" dictionary, + # which is used for def calls within the same template, + # to simulate "enclosing scope" + self.printer.writeline( + "__M_locals_builtin_stored = __M_locals_builtin()" + ) + self.printer.writeline( + "__M_locals.update(__M_dict_builtin([(__M_key," + " __M_locals_builtin_stored[__M_key]) for __M_key in" + " [%s] if __M_key in __M_locals_builtin_stored]))" + % ",".join([repr(x) for x in node.declared_identifiers()]) + ) + + def visitIncludeTag(self, node): + self.printer.start_source(node.lineno) + args = node.attributes.get("args") + if args: + self.printer.writeline( + "runtime._include_file(context, %s, _template_uri, %s)" + % (node.parsed_attributes["file"], args) + ) + else: + self.printer.writeline( + "runtime._include_file(context, %s, _template_uri)" + % (node.parsed_attributes["file"]) + ) + + def visitNamespaceTag(self, node): + pass + + def visitDefTag(self, node): + pass + + def visitBlockTag(self, node): + if node.is_anonymous: + self.printer.writeline("%s()" % node.funcname) + else: + nameargs = node.get_argument_expressions(as_call=True) + nameargs += ["**pageargs"] + self.printer.writeline( + "if 'parent' not in context._data or " + "not hasattr(context._data['parent'], '%s'):" % node.funcname + ) + self.printer.writeline( + "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs)) + ) + self.printer.writeline("\n") + + def visitCallNamespaceTag(self, node): + # TODO: we can put namespace-specific checks here, such + # as ensure the given namespace will be imported, + # pre-import the namespace, etc. 
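+        # a namespace-style call such as <%self:foo x="5"/> (illustrative
+        # example) compiles through the same ccall() machinery as
+        # <%call expr="self.foo(x='5')">, so we simply delegate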
+ self.visitCallTag(node) + + def visitCallTag(self, node): + self.printer.writeline("def ccall(caller):") + export = ["body"] + callable_identifiers = self.identifiers.branch(node, nested=True) + body_identifiers = callable_identifiers.branch(node, nested=False) + # we want the 'caller' passed to ccall to be used + # for the body() function, but for other non-body() + # <%def>s within <%call> we want the current caller + # off the call stack (if any) + body_identifiers.add_declared("caller") + + self.identifier_stack.append(body_identifiers) + + class DefVisitor: + def visitDefTag(s, node): + s.visitDefOrBase(node) + + def visitBlockTag(s, node): + s.visitDefOrBase(node) + + def visitDefOrBase(s, node): + self.write_inline_def(node, callable_identifiers, nested=False) + if not node.is_anonymous: + export.append(node.funcname) + # remove defs that are within the <%call> from the + # "closuredefs" defined in the body, so they dont render twice + if node.funcname in body_identifiers.closuredefs: + del body_identifiers.closuredefs[node.funcname] + + vis = DefVisitor() + for n in node.nodes: + n.accept_visitor(vis) + self.identifier_stack.pop() + + bodyargs = node.body_decl.get_argument_expressions() + self.printer.writeline("def body(%s):" % ",".join(bodyargs)) + + # TODO: figure out best way to specify + # buffering/nonbuffering (at call time would be better) + buffered = False + if buffered: + self.printer.writelines("context._push_buffer()", "try:") + self.write_variable_declares(body_identifiers) + self.identifier_stack.append(body_identifiers) + + for n in node.nodes: + n.accept_visitor(self) + self.identifier_stack.pop() + + self.write_def_finish(node, buffered, False, False, callstack=False) + self.printer.writelines(None, "return [%s]" % (",".join(export)), None) + + self.printer.writelines( + # push on caller for nested call + "context.caller_stack.nextcaller = " + "runtime.Namespace('caller', context, " + "callables=ccall(__M_caller))", + "try:", + ) + self.printer.start_source(node.lineno) + self.printer.writelines( + "__M_writer(%s)" + % self.create_filter_callable([], node.expression, True), + "finally:", + "context.caller_stack.nextcaller = None", + None, + ) + + +class _Identifiers: + + """tracks the status of identifier names as template code is rendered.""" + + def __init__(self, compiler, node=None, parent=None, nested=False): + if parent is not None: + # if we are the branch created in write_namespaces(), + # we don't share any context from the main body(). + if isinstance(node, parsetree.NamespaceTag): + self.declared = set() + self.topleveldefs = util.SetLikeDict() + else: + # things that have already been declared + # in an enclosing namespace (i.e. names we can just use) + self.declared = ( + set(parent.declared) + .union([c.name for c in parent.closuredefs.values()]) + .union(parent.locally_declared) + .union(parent.argument_declared) + ) + + # if these identifiers correspond to a "nested" + # scope, it means whatever the parent identifiers + # had as undeclared will have been declared by that parent, + # and therefore we have them in our scope. + if nested: + self.declared = self.declared.union(parent.undeclared) + + # top level defs that are available + self.topleveldefs = util.SetLikeDict(**parent.topleveldefs) + else: + self.declared = set() + self.topleveldefs = util.SetLikeDict() + + self.compiler = compiler + + # things within this level that are referenced before they + # are declared (e.g. 
assigned to) + self.undeclared = set() + + # things that are declared locally. some of these things + # could be in the "undeclared" list as well if they are + # referenced before declared + self.locally_declared = set() + + # assignments made in explicit python blocks. + # these will be propagated to + # the context of local def calls. + self.locally_assigned = set() + + # things that are declared in the argument + # signature of the def callable + self.argument_declared = set() + + # closure defs that are defined in this level + self.closuredefs = util.SetLikeDict() + + self.node = node + + if node is not None: + node.accept_visitor(self) + + illegal_names = self.compiler.reserved_names.intersection( + self.locally_declared + ) + if illegal_names: + raise exceptions.NameConflictError( + "Reserved words declared in template: %s" + % ", ".join(illegal_names) + ) + + def branch(self, node, **kwargs): + """create a new Identifiers for a new Node, with + this Identifiers as the parent.""" + + return _Identifiers(self.compiler, node, self, **kwargs) + + @property + def defs(self): + return set(self.topleveldefs.union(self.closuredefs).values()) + + def __repr__(self): + return ( + "Identifiers(declared=%r, locally_declared=%r, " + "undeclared=%r, topleveldefs=%r, closuredefs=%r, " + "argumentdeclared=%r)" + % ( + list(self.declared), + list(self.locally_declared), + list(self.undeclared), + [c.name for c in self.topleveldefs.values()], + [c.name for c in self.closuredefs.values()], + self.argument_declared, + ) + ) + + def check_declared(self, node): + """update the state of this Identifiers with the undeclared + and declared identifiers of the given node.""" + + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + for ident in node.declared_identifiers(): + self.locally_declared.add(ident) + + def add_declared(self, ident): + self.declared.add(ident) + if ident in self.undeclared: + self.undeclared.remove(ident) + + def visitExpression(self, node): + self.check_declared(node) + + def visitControlLine(self, node): + self.check_declared(node) + + def visitCode(self, node): + if not node.ismodule: + self.check_declared(node) + self.locally_assigned = self.locally_assigned.union( + node.declared_identifiers() + ) + + def visitNamespaceTag(self, node): + # only traverse into the sub-elements of a + # <%namespace> tag if we are the branch created in + # write_namespaces() + if self.node is node: + for n in node.nodes: + n.accept_visitor(self) + + def _check_name_exists(self, collection, node): + existing = collection.get(node.funcname) + collection[node.funcname] = node + if ( + existing is not None + and existing is not node + and (node.is_block or existing.is_block) + ): + raise exceptions.CompileException( + "%%def or %%block named '%s' already " + "exists in this template." 
% node.funcname, + **node.exception_kwargs, + ) + + def visitDefTag(self, node): + if node.is_root() and not node.is_anonymous: + self._check_name_exists(self.topleveldefs, node) + elif node is not self.node: + self._check_name_exists(self.closuredefs, node) + + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + # visit defs only one level deep + if node is self.node: + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + + for n in node.nodes: + n.accept_visitor(self) + + def visitBlockTag(self, node): + if node is not self.node and not node.is_anonymous: + if isinstance(self.node, parsetree.DefTag): + raise exceptions.CompileException( + "Named block '%s' not allowed inside of def '%s'" + % (node.name, self.node.name), + **node.exception_kwargs, + ) + elif isinstance( + self.node, (parsetree.CallTag, parsetree.CallNamespaceTag) + ): + raise exceptions.CompileException( + "Named block '%s' not allowed inside of <%%call> tag" + % (node.name,), + **node.exception_kwargs, + ) + + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + if not node.is_anonymous: + self._check_name_exists(self.topleveldefs, node) + self.undeclared.add(node.funcname) + elif node is not self.node: + self._check_name_exists(self.closuredefs, node) + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + for n in node.nodes: + n.accept_visitor(self) + + def visitTextTag(self, node): + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + def visitIncludeTag(self, node): + self.check_declared(node) + + def visitPageTag(self, node): + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + self.check_declared(node) + + def visitCallNamespaceTag(self, node): + self.visitCallTag(node) + + def visitCallTag(self, node): + if node is self.node: + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + for n in node.nodes: + n.accept_visitor(self) + else: + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + +_FOR_LOOP = re.compile( + r"^for\s+((?:\(?)\s*" + r"(?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*" + r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z_0-9]*),??)*\s*(?:\)?)" + r"(?:\s*,\s*(?:" + r"(?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*" + r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z_0-9]*),??)*\s*(?:\)?)" + r"),??)*\s*(?:\)?))\s+in\s+(.*):" +) + + +def mangle_mako_loop(node, printer): + """converts a for loop into a context manager wrapped around a for loop + when access to the `loop` variable has been detected in the for loop body + """ + loop_variable = LoopVariable() + node.accept_visitor(loop_variable) + if loop_variable.detected: + node.nodes[-1].has_loop_context = True + match = _FOR_LOOP.match(node.text) + if match: + printer.writelines( + "loop = __M_loop._enter(%s)" % match.group(2), + "try:" + # 'with __M_loop(%s) as loop:' % match.group(2) + ) + text = "for %s in loop:" % match.group(1) + else: + raise SyntaxError("Couldn't apply loop context: 
%s" % node.text) + else: + text = node.text + return text + + +class LoopVariable: + + """A node visitor which looks for the name 'loop' within undeclared + identifiers.""" + + def __init__(self): + self.detected = False + + def _loop_reference_detected(self, node): + if "loop" in node.undeclared_identifiers(): + self.detected = True + else: + for n in node.get_children(): + n.accept_visitor(self) + + def visitControlLine(self, node): + self._loop_reference_detected(node) + + def visitCode(self, node): + self._loop_reference_detected(node) + + def visitExpression(self, node): + self._loop_reference_detected(node) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/compat.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..1c1760124768841121dc3c09cdefe7159995674c --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/compat.py @@ -0,0 +1,70 @@ +# mako/compat.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import collections +from importlib import metadata as importlib_metadata +from importlib import util +import inspect +import sys + +win32 = sys.platform.startswith("win") +pypy = hasattr(sys, "pypy_version_info") + +ArgSpec = collections.namedtuple( + "ArgSpec", ["args", "varargs", "keywords", "defaults"] +) + + +def inspect_getargspec(func): + """getargspec based on fully vendored getfullargspec from Python 3.3.""" + + if inspect.ismethod(func): + func = func.__func__ + if not inspect.isfunction(func): + raise TypeError(f"{func!r} is not a Python function") + + co = func.__code__ + if not inspect.iscode(co): + raise TypeError(f"{co!r} is not a code object") + + nargs = co.co_argcount + names = co.co_varnames + nkwargs = co.co_kwonlyargcount + args = list(names[:nargs]) + + nargs += nkwargs + varargs = None + if co.co_flags & inspect.CO_VARARGS: + varargs = co.co_varnames[nargs] + nargs = nargs + 1 + varkw = None + if co.co_flags & inspect.CO_VARKEYWORDS: + varkw = co.co_varnames[nargs] + + return ArgSpec(args, varargs, varkw, func.__defaults__) + + +def load_module(module_id, path): + spec = util.spec_from_file_location(module_id, path) + module = util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def exception_as(): + return sys.exc_info()[1] + + +def exception_name(exc): + return exc.__class__.__name__ + + +def importlib_metadata_get(group): + ep = importlib_metadata.entry_points() + if hasattr(ep, "select"): + return ep.select(group=group) + else: + return ep.get(group, ()) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/exceptions.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..6aeb2d274e3509cca3278ac9d5da21d8294aa02e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/exceptions.py @@ -0,0 +1,417 @@ +# mako/exceptions.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""exception classes""" + +import sys +import traceback + +from mako import compat +from 
mako import util + + +class MakoException(Exception): + pass + + +class RuntimeException(MakoException): + pass + + +def _format_filepos(lineno, pos, filename): + if filename is None: + return " at line: %d char: %d" % (lineno, pos) + else: + return " in file '%s' at line: %d char: %d" % (filename, lineno, pos) + + +class CompileException(MakoException): + def __init__(self, message, source, lineno, pos, filename): + MakoException.__init__( + self, message + _format_filepos(lineno, pos, filename) + ) + self.lineno = lineno + self.pos = pos + self.filename = filename + self.source = source + + +class SyntaxException(MakoException): + def __init__(self, message, source, lineno, pos, filename): + MakoException.__init__( + self, message + _format_filepos(lineno, pos, filename) + ) + self.lineno = lineno + self.pos = pos + self.filename = filename + self.source = source + + +class UnsupportedError(MakoException): + + """raised when a retired feature is used.""" + + +class NameConflictError(MakoException): + + """raised when a reserved word is used inappropriately""" + + +class TemplateLookupException(MakoException): + pass + + +class TopLevelLookupException(TemplateLookupException): + pass + + +class RichTraceback: + + """Pull the current exception from the ``sys`` traceback and extracts + Mako-specific template information. + + See the usage examples in :ref:`handling_exceptions`. + + """ + + def __init__(self, error=None, traceback=None): + self.source, self.lineno = "", 0 + + if error is None or traceback is None: + t, value, tback = sys.exc_info() + + if error is None: + error = value or t + + if traceback is None: + traceback = tback + + self.error = error + self.records = self._init(traceback) + + if isinstance(self.error, (CompileException, SyntaxException)): + self.source = self.error.source + self.lineno = self.error.lineno + self._has_source = True + + self._init_message() + + @property + def errorname(self): + return compat.exception_name(self.error) + + def _init_message(self): + """Find a unicode representation of self.error""" + try: + self.message = str(self.error) + except UnicodeError: + try: + self.message = str(self.error) + except UnicodeEncodeError: + # Fallback to args as neither unicode nor + # str(Exception(u'\xe6')) work in Python < 2.6 + self.message = self.error.args[0] + if not isinstance(self.message, str): + self.message = str(self.message, "ascii", "replace") + + def _get_reformatted_records(self, records): + for rec in records: + if rec[6] is not None: + yield (rec[4], rec[5], rec[2], rec[6]) + else: + yield tuple(rec[0:4]) + + @property + def traceback(self): + """Return a list of 4-tuple traceback records (i.e. normal python + format) with template-corresponding lines remapped to the originating + template. 
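+
+        e.g. (an illustrative sketch of typical use, assuming a ``template``
+        object exists; see also :ref:`handling_exceptions`)::
+
+            from mako import exceptions
+
+            try:
+                template.render()
+            except Exception:
+                tb = exceptions.RichTraceback()
+                for fname, lineno, func, line in tb.traceback:
+                    print("File %s, line %s, in %s" % (fname, lineno, func))
+                    print(line)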
+ + """ + return list(self._get_reformatted_records(self.records)) + + @property + def reverse_records(self): + return reversed(self.records) + + @property + def reverse_traceback(self): + """Return the same data as traceback, except in reverse order.""" + + return list(self._get_reformatted_records(self.reverse_records)) + + def _init(self, trcback): + """format a traceback from sys.exc_info() into 7-item tuples, + containing the regular four traceback tuple items, plus the original + template filename, the line number adjusted relative to the template + source, and code line from that line number of the template.""" + + import mako.template + + mods = {} + rawrecords = traceback.extract_tb(trcback) + new_trcback = [] + for filename, lineno, function, line in rawrecords: + if not line: + line = "" + try: + (line_map, template_lines, template_filename) = mods[filename] + except KeyError: + try: + info = mako.template._get_module_info(filename) + module_source = info.code + template_source = info.source + template_filename = ( + info.template_filename or info.template_uri or filename + ) + except KeyError: + # A normal .py file (not a Template) + new_trcback.append( + ( + filename, + lineno, + function, + line, + None, + None, + None, + None, + ) + ) + continue + + template_ln = 1 + + mtm = mako.template.ModuleInfo + source_map = mtm.get_module_source_metadata( + module_source, full_line_map=True + ) + line_map = source_map["full_line_map"] + + template_lines = [ + line_ for line_ in template_source.split("\n") + ] + mods[filename] = (line_map, template_lines, template_filename) + + template_ln = line_map[lineno - 1] + + if template_ln <= len(template_lines): + template_line = template_lines[template_ln - 1] + else: + template_line = None + new_trcback.append( + ( + filename, + lineno, + function, + line, + template_filename, + template_ln, + template_line, + template_source, + ) + ) + if not self.source: + for l in range(len(new_trcback) - 1, 0, -1): + if new_trcback[l][5]: + self.source = new_trcback[l][7] + self.lineno = new_trcback[l][5] + break + else: + if new_trcback: + try: + # A normal .py file (not a Template) + with open(new_trcback[-1][0], "rb") as fp: + encoding = util.parse_encoding(fp) + if not encoding: + encoding = "utf-8" + fp.seek(0) + self.source = fp.read() + if encoding: + self.source = self.source.decode(encoding) + except IOError: + self.source = "" + self.lineno = new_trcback[-1][1] + return new_trcback + + +def text_error_template(lookup=None): + """Provides a template that renders a stack trace in a similar format to + the Python interpreter, substituting source template filenames, line + numbers and code for that of the originating source template, as + applicable. + + """ + import mako.template + + return mako.template.Template( + r""" +<%page args="error=None, traceback=None"/> +<%! 
+    from mako.exceptions import RichTraceback
+%>\
+<%
+    tback = RichTraceback(error=error, traceback=traceback)
+%>\
+Traceback (most recent call last):
+% for (filename, lineno, function, line) in tback.traceback:
+  File "${filename}", line ${lineno}, in ${function or '?'}
+    ${line | trim}
+% endfor
+${tback.errorname}: ${tback.message}
+"""
+    )
+
+
+def _install_pygments():
+    global syntax_highlight, pygments_html_formatter
+    from mako.ext.pygmentplugin import syntax_highlight  # noqa
+    from mako.ext.pygmentplugin import pygments_html_formatter  # noqa
+
+
+def _install_fallback():
+    global syntax_highlight, pygments_html_formatter
+    from mako.filters import html_escape
+
+    pygments_html_formatter = None
+
+    def syntax_highlight(filename="", language=None):
+        return html_escape
+
+
+def _install_highlighting():
+    try:
+        _install_pygments()
+    except ImportError:
+        _install_fallback()
+
+
+_install_highlighting()
+
+
+def html_error_template():
+    """Provides a template that renders a stack trace in an HTML format,
+    providing an excerpt of code as well as substituting source template
+    filenames, line numbers and code for that of the originating source
+    template, as applicable.
+
+    The template's default ``encoding_errors`` value is
+    ``'htmlentityreplace'``. The template has two options. With the
+    ``full`` option disabled, only a section of an HTML document is
+    returned. With the ``css`` option disabled, the default stylesheet
+    won't be included.
+
+    """
+    import mako.template
+
+    return mako.template.Template(
+        r"""
+<%!
+   from mako.exceptions import RichTraceback, syntax_highlight,\
+        pygments_html_formatter
+%>
+<%page args="full=True, css=True, error=None, traceback=None"/>
+% if full:
+<html>
+<head>
+    <title>Mako Runtime Error</title>
+% endif
+% if css:
+    <style>
+        body { font-family:verdana; margin:10px 30px 10px 30px;}
+        .stacktrace { margin:5px 5px 5px 5px; }
+        .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
+        .nonhighlight { padding:0px; background-color:#DFDFDF; }
+        .sample { padding:10px; margin:10px 10px 10px 10px;
+                  font-family:monospace; }
+        .sampleline { padding:0px 10px 0px 10px; }
+        .sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
+        .location { font-size:80%; }
+        .highlight { white-space:pre; }
+        .sampleline { white-space:pre; }
+
+    % if pygments_html_formatter:
+        ${pygments_html_formatter.get_style_defs() | n}
+        .linenos { min-width: 2.5em; text-align: right; }
+        pre { margin: 0; }
+        .syntax-highlighted { padding: 0 10px; }
+        .syntax-highlightedtable { border-spacing: 1px; }
+        .nonhighlight { border-top: 1px solid #DFDFDF;
+                        border-bottom: 1px solid #DFDFDF; }
+        .stacktrace .nonhighlight { margin: 5px 15px 10px; }
+        .sourceline { margin: 0 0; font-family:monospace; }
+        .code { background-color: #F8F8F8; width: 100%; }
+        .error .code { background-color: #FFBDBD; }
+        .error .syntax-highlighted { background-color: #FFBDBD; }
+    % endif
+    </style>
+% endif
+% if full:
+</head>
+<body>
+% endif
+
+<h2>Error !</h2>
+<%
+    tback = RichTraceback(error=error, traceback=traceback)
+    src = tback.source
+    line = tback.lineno
+    if src:
+        lines = src.split('\n')
+    else:
+        lines = None
+%>
+<h3>${tback.errorname}: ${tback.message|h}</h3>
+
+% if lines:
+    <div class="sample">
+    <div class="nonhighlight">
+% for index in range(max(0, line-4),min(len(lines), line+5)):
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = index + 1
+    %>
+    % if index + 1 == line:
+    <%
+       if pygments_html_formatter:
+           old_cssclass = pygments_html_formatter.cssclass
+           pygments_html_formatter.cssclass = 'error ' + old_cssclass
+    %>
+        ${lines[index] | syntax_highlight(language='mako')}
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.cssclass = old_cssclass
+    %>
+    % else:
+        ${lines[index] | syntax_highlight(language='mako')}
+    % endif
+% endfor
+    </div>
+    </div>
+% endif
+
+<div class="stacktrace">
+% for (filename, lineno, function, line) in tback.reverse_traceback:
+    <div class="location">${filename}, line ${lineno}:</div>
+    <div class="nonhighlight">
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = lineno
+    %>
+      <div class="sourceline">${line | syntax_highlight(filename)}</div>
+    </div>
+% endfor
+</div>
+
+% if full:
+</body>
+</html>
+% endif
+""",
+        output_encoding=sys.getdefaultencoding(),
+        encoding_errors="htmlentityreplace",
+    )
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/autohandler.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/autohandler.py
new file mode 100644
index 0000000000000000000000000000000000000000..829ed097ce1dd90fd517760f0f49ee23d41cf8c6
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/autohandler.py
@@ -0,0 +1,70 @@
+# ext/autohandler.py
+# Copyright 2006-2025 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""adds autohandler functionality to Mako templates.
+
+requires that the TemplateLookup class is used with templates.
+
+usage::
+
+    <%!
+        from mako.ext.autohandler import autohandler
+    %>
+    <%inherit file="${autohandler(template, context)}"/>
+
+
+or with custom autohandler filename::
+
+    <%!
+        from mako.ext.autohandler import autohandler
+    %>
+    <%inherit file="${autohandler(template, context, name='somefilename')}"/>
+
+"""
+
+import os
+import posixpath
+import re
+
+
+def autohandler(template, context, name="autohandler"):
+    lookup = context.lookup
+    _template_uri = template.module._template_uri
+    if not lookup.filesystem_checks:
+        try:
+            return lookup._uri_cache[(autohandler, _template_uri, name)]
+        except KeyError:
+            pass
+
+    tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
+    while len(tokens):
+        path = "/" + "/".join(tokens)
+        if path != _template_uri and _file_exists(lookup, path):
+            if not lookup.filesystem_checks:
+                return lookup._uri_cache.setdefault(
+                    (autohandler, _template_uri, name), path
+                )
+            else:
+                return path
+        if len(tokens) == 1:
+            break
+        tokens[-2:] = [name]
+
+    if not lookup.filesystem_checks:
+        return lookup._uri_cache.setdefault(
+            (autohandler, _template_uri, name), None
+        )
+    else:
+        return None
+
+
+def _file_exists(lookup, path):
+    psub = re.sub(r"^/", "", path)
+    for d in lookup.directories:
+        if os.path.exists(d + "/" + psub):
+            return True
+    else:
+        return False
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/babelplugin.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/babelplugin.py
new file mode 100644
index 0000000000000000000000000000000000000000..541ea54091c80974ab9091ad478692673816befa
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/babelplugin.py
@@ -0,0 +1,57 @@
+# ext/babelplugin.py
+# Copyright 2006-2025 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""gettext message extraction via Babel: https://pypi.org/project/Babel/"""
+from babel.messages.extract import extract_python
+
+from mako.ext.extract import MessageExtractor
+
+
+class BabelMakoExtractor(MessageExtractor):
+    def __init__(self, keywords, comment_tags, options):
+        self.keywords = keywords
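+        # the keywords / comment_tags / options values arrive from Babel's
+        # extraction setup (e.g. a babel.cfg mapping); they are forwarded
+        # as-is to babel.messages.extract.extract_python in process_python()
+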
self.options = options + self.config = { + "comment-tags": " ".join(comment_tags), + "encoding": options.get( + "input_encoding", options.get("encoding", None) + ), + } + super().__init__() + + def __call__(self, fileobj): + return self.process_file(fileobj) + + def process_python(self, code, code_lineno, translator_strings): + comment_tags = self.config["comment-tags"] + for ( + lineno, + funcname, + messages, + python_translator_comments, + ) in extract_python(code, self.keywords, comment_tags, self.options): + yield ( + code_lineno + (lineno - 1), + funcname, + messages, + translator_strings + python_translator_comments, + ) + + +def extract(fileobj, keywords, comment_tags, options): + """Extract messages from Mako templates. + + :param fileobj: the file-like object the messages should be extracted from + :param keywords: a list of keywords (i.e. function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :return: an iterator over ``(lineno, funcname, message, comments)`` tuples + :rtype: ``iterator`` + """ + extractor = BabelMakoExtractor(keywords, comment_tags, options) + yield from extractor(fileobj) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/beaker_cache.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/beaker_cache.py new file mode 100644 index 0000000000000000000000000000000000000000..b1392f30f684de1dc830888a11337d20afb478bc --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/beaker_cache.py @@ -0,0 +1,82 @@ +# ext/beaker_cache.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provide a :class:`.CacheImpl` for the Beaker caching system.""" + +from mako import exceptions +from mako.cache import CacheImpl + +try: + from beaker import cache as beaker_cache +except: + has_beaker = False +else: + has_beaker = True + +_beaker_cache = None + + +class BeakerCacheImpl(CacheImpl): + + """A :class:`.CacheImpl` provided for the Beaker caching system. + + This plugin is used by default, based on the default + value of ``'beaker'`` for the ``cache_impl`` parameter of the + :class:`.Template` or :class:`.TemplateLookup` classes. + + """ + + def __init__(self, cache): + if not has_beaker: + raise exceptions.RuntimeException( + "Can't initialize Beaker plugin; Beaker is not installed." 
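+                # (the guarded "from beaker import cache" at module top set
+                # has_beaker; raising here, rather than at import time, keeps
+                # Beaker an optional dependency)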
+ ) + global _beaker_cache + if _beaker_cache is None: + if "manager" in cache.template.cache_args: + _beaker_cache = cache.template.cache_args["manager"] + else: + _beaker_cache = beaker_cache.CacheManager() + super().__init__(cache) + + def _get_cache(self, **kw): + expiretime = kw.pop("timeout", None) + if "dir" in kw: + kw["data_dir"] = kw.pop("dir") + elif self.cache.template.module_directory: + kw["data_dir"] = self.cache.template.module_directory + + if "manager" in kw: + kw.pop("manager") + + if kw.get("type") == "memcached": + kw["type"] = "ext:memcached" + + if "region" in kw: + region = kw.pop("region") + cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw) + else: + cache = _beaker_cache.get_cache(self.cache.id, **kw) + cache_args = {"starttime": self.cache.starttime} + if expiretime: + cache_args["expiretime"] = expiretime + return cache, cache_args + + def get_or_create(self, key, creation_function, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, createfunc=creation_function, **kw) + + def put(self, key, value, **kw): + cache, kw = self._get_cache(**kw) + cache.put(key, value, **kw) + + def get(self, key, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, **kw) + + def invalidate(self, key, **kw): + cache, kw = self._get_cache(**kw) + cache.remove_value(key, **kw) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/extract.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/extract.py new file mode 100644 index 0000000000000000000000000000000000000000..609c5c29882b36165a381f527938f939c55010ee --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/extract.py @@ -0,0 +1,129 @@ +# ext/extract.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from io import BytesIO +from io import StringIO +import re + +from mako import lexer +from mako import parsetree + + +class MessageExtractor: + use_bytes = True + + def process_file(self, fileobj): + template_node = lexer.Lexer( + fileobj.read(), input_encoding=self.config["encoding"] + ).parse() + yield from self.extract_nodes(template_node.get_children()) + + def extract_nodes(self, nodes): + translator_comments = [] + in_translator_comments = False + input_encoding = self.config["encoding"] or "ascii" + comment_tags = list( + filter(None, re.split(r"\s+", self.config["comment-tags"])) + ) + + for node in nodes: + child_nodes = None + if ( + in_translator_comments + and isinstance(node, parsetree.Text) + and not node.content.strip() + ): + # Ignore whitespace within translator comments + continue + + if isinstance(node, parsetree.Comment): + value = node.text.strip() + if in_translator_comments: + translator_comments.extend( + self._split_comment(node.lineno, value) + ) + continue + for comment_tag in comment_tags: + if value.startswith(comment_tag): + in_translator_comments = True + translator_comments.extend( + self._split_comment(node.lineno, value) + ) + continue + + if isinstance(node, parsetree.DefTag): + code = node.function_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.BlockTag): + code = node.body_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.CallTag): + code = node.code.code + child_nodes = node.nodes + elif isinstance(node, parsetree.PageTag): + code = 
node.body_decl.code + elif isinstance(node, parsetree.CallNamespaceTag): + code = node.expression + child_nodes = node.nodes + elif isinstance(node, parsetree.ControlLine): + if node.isend: + in_translator_comments = False + continue + code = node.text + elif isinstance(node, parsetree.Code): + in_translator_comments = False + code = node.code.code + elif isinstance(node, parsetree.Expression): + code = node.code.code + else: + continue + + # Comments don't apply unless they immediately precede the message + if ( + translator_comments + and translator_comments[-1][0] < node.lineno - 1 + ): + translator_comments = [] + + translator_strings = [ + comment[1] for comment in translator_comments + ] + + if isinstance(code, str) and self.use_bytes: + code = code.encode(input_encoding, "backslashreplace") + + used_translator_comments = False + # We add extra newline to work around a pybabel bug + # (see python-babel/babel#274, parse_encoding dies if the first + # input string of the input is non-ascii) + # Also, because we added it, we have to subtract one from + # node.lineno + if self.use_bytes: + code = BytesIO(b"\n" + code) + else: + code = StringIO("\n" + code) + + for message in self.process_python( + code, node.lineno - 1, translator_strings + ): + yield message + used_translator_comments = True + + if used_translator_comments: + translator_comments = [] + in_translator_comments = False + + if child_nodes: + yield from self.extract_nodes(child_nodes) + + @staticmethod + def _split_comment(lineno, comment): + """Return the multiline comment at lineno split into a list of + comment line numbers and the accompanying comment line""" + return [ + (lineno + index, line) + for index, line in enumerate(comment.splitlines()) + ] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/linguaplugin.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/linguaplugin.py new file mode 100644 index 0000000000000000000000000000000000000000..118668de2c19d987a15eea5f0e29ca3c9a068035 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/linguaplugin.py @@ -0,0 +1,57 @@ +# ext/linguaplugin.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import contextlib +import io + +from lingua.extractors import Extractor +from lingua.extractors import get_extractor +from lingua.extractors import Message + +from mako.ext.extract import MessageExtractor + + +class LinguaMakoExtractor(Extractor, MessageExtractor): + """Mako templates""" + + use_bytes = False + extensions = [".mako"] + default_config = {"encoding": "utf-8", "comment-tags": ""} + + def __call__(self, filename, options, fileobj=None): + self.options = options + self.filename = filename + self.python_extractor = get_extractor("x.py") + if fileobj is None: + ctx = open(filename, "r") + else: + ctx = contextlib.nullcontext(fileobj) + with ctx as file_: + yield from self.process_file(file_) + + def process_python(self, code, code_lineno, translator_strings): + source = code.getvalue().strip() + if source.endswith(":"): + if source in ("try:", "else:") or source.startswith("except"): + source = "" # Ignore try/except and else + elif source.startswith("elif"): + source = source[2:] # Replace "elif" with "if" + source += "pass" + code = io.StringIO(source) + for msg in self.python_extractor( + 
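+            # line numbers are re-based against the template here; see the
+            # lineno bookkeeping in mako/ext/extract.py above, which prepends
+            # "\n" to the code buffer and compensates with node.lineno - 1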
self.filename, self.options, code, code_lineno - 1 + ): + if translator_strings: + msg = Message( + msg.msgctxt, + msg.msgid, + msg.msgid_plural, + msg.flags, + " ".join(translator_strings + [msg.comment]), + msg.tcomment, + msg.location, + ) + yield msg diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/preprocessors.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/preprocessors.py new file mode 100644 index 0000000000000000000000000000000000000000..21f2db54db507b236146432c39d4b4cc40bb2f9a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/preprocessors.py @@ -0,0 +1,20 @@ +# ext/preprocessors.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""preprocessing functions, used with the 'preprocessor' +argument on Template, TemplateLookup""" + +import re + + +def convert_comments(text): + """preprocess old style comments. + + example: + + from mako.ext.preprocessors import convert_comments + t = Template(..., preprocessor=convert_comments)""" + return re.sub(r"(?<=\n)\s*#[^#]", "##", text) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/pygmentplugin.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/pygmentplugin.py new file mode 100644 index 0000000000000000000000000000000000000000..74a8fb9dbcf6578b30ba8874ac31838c221ac19e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/pygmentplugin.py @@ -0,0 +1,150 @@ +# ext/pygmentplugin.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from pygments import highlight +from pygments.formatters.html import HtmlFormatter +from pygments.lexer import bygroups +from pygments.lexer import DelegatingLexer +from pygments.lexer import include +from pygments.lexer import RegexLexer +from pygments.lexer import using +from pygments.lexers.agile import Python3Lexer +from pygments.lexers.agile import PythonLexer +from pygments.lexers.web import CssLexer +from pygments.lexers.web import HtmlLexer +from pygments.lexers.web import JavascriptLexer +from pygments.lexers.web import XmlLexer +from pygments.token import Comment +from pygments.token import Keyword +from pygments.token import Name +from pygments.token import Operator +from pygments.token import Other +from pygments.token import String +from pygments.token import Text + + +class MakoLexer(RegexLexer): + name = "Mako" + aliases = ["mako"] + filenames = ["*.mao"] + + tokens = { + "root": [ + ( + r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)", + bygroups(Text, Comment.Preproc, Keyword, Other), + ), + ( + r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)", + bygroups(Text, Comment.Preproc, using(PythonLexer), Other), + ), + ( + r"(\s*)(##[^\n]*)(\n|\Z)", + bygroups(Text, Comment.Preproc, Other), + ), + (r"""(?s)<%doc>.*?""", Comment.Preproc), + ( + r"(<%)([\w\.\:]+)", + bygroups(Comment.Preproc, Name.Builtin), + "tag", + ), + ( + r"()", + bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc), + ), + (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"), + ( + r"(?s)(<%(?:!?))(.*?)(%>)", + bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc), + ), + ( + 
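+            # ${...} expression substitutions: the brace delimiters are
+            # preprocessor tokens, the inner expression is highlighted via
+            # the delegated PythonLexer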
r"(\$\{)(.*?)(\})", + bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc), + ), + ( + r"""(?sx) + (.+?) # anything, followed by: + (?: + (?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line + (?=\#\*) | # multiline comment + (?=", Comment.Preproc, "#pop"), + (r"\s+", Text), + ], + "attr": [ + ('".*?"', String, "#pop"), + ("'.*?'", String, "#pop"), + (r"[^\s>]+", String, "#pop"), + ], + } + + +class MakoHtmlLexer(DelegatingLexer): + name = "HTML+Mako" + aliases = ["html+mako"] + + def __init__(self, **options): + super().__init__(HtmlLexer, MakoLexer, **options) + + +class MakoXmlLexer(DelegatingLexer): + name = "XML+Mako" + aliases = ["xml+mako"] + + def __init__(self, **options): + super().__init__(XmlLexer, MakoLexer, **options) + + +class MakoJavascriptLexer(DelegatingLexer): + name = "JavaScript+Mako" + aliases = ["js+mako", "javascript+mako"] + + def __init__(self, **options): + super().__init__(JavascriptLexer, MakoLexer, **options) + + +class MakoCssLexer(DelegatingLexer): + name = "CSS+Mako" + aliases = ["css+mako"] + + def __init__(self, **options): + super().__init__(CssLexer, MakoLexer, **options) + + +pygments_html_formatter = HtmlFormatter( + cssclass="syntax-highlighted", linenos=True +) + + +def syntax_highlight(filename="", language=None): + mako_lexer = MakoLexer() + python_lexer = Python3Lexer() + if filename.startswith("memory:") or language == "mako": + return lambda string: highlight( + string, mako_lexer, pygments_html_formatter + ) + return lambda string: highlight( + string, python_lexer, pygments_html_formatter + ) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/turbogears.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/turbogears.py new file mode 100644 index 0000000000000000000000000000000000000000..25ad35e7746b82a2e65a4c33c70da701912ccf00 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/ext/turbogears.py @@ -0,0 +1,61 @@ +# ext/turbogears.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from mako import compat +from mako.lookup import TemplateLookup +from mako.template import Template + + +class TGPlugin: + + """TurboGears compatible Template Plugin.""" + + def __init__(self, extra_vars_func=None, options=None, extension="mak"): + self.extra_vars_func = extra_vars_func + self.extension = extension + if not options: + options = {} + + # Pull the options out and initialize the lookup + lookup_options = {} + for k, v in options.items(): + if k.startswith("mako."): + lookup_options[k[5:]] = v + elif k in ["directories", "filesystem_checks", "module_directory"]: + lookup_options[k] = v + self.lookup = TemplateLookup(**lookup_options) + + self.tmpl_options = {} + # transfer lookup args to template args, based on those available + # in getargspec + for kw in compat.inspect_getargspec(Template.__init__)[0]: + if kw in lookup_options: + self.tmpl_options[kw] = lookup_options[kw] + + def load_template(self, templatename, template_string=None): + """Loads a template from a file or a string""" + if template_string is not None: + return Template(template_string, **self.tmpl_options) + # Translate TG dot notation to normal / template path + if "/" not in templatename: + templatename = ( + "/" + templatename.replace(".", "/") + "." 
+ self.extension + ) + + # Lookup template + return self.lookup.get_template(templatename) + + def render( + self, info, format="html", fragment=False, template=None # noqa + ): + if isinstance(template, str): + template = self.load_template(template) + + # Load extra vars func if provided + if self.extra_vars_func: + info.update(self.extra_vars_func()) + + return template.render(**info) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/filters.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/filters.py new file mode 100644 index 0000000000000000000000000000000000000000..d5338bd9cf2972e7d7a956920c24f62cba39f709 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/filters.py @@ -0,0 +1,163 @@ +# mako/filters.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +import codecs +from html.entities import codepoint2name +from html.entities import name2codepoint +import re +from urllib.parse import quote_plus + +import markupsafe + +html_escape = markupsafe.escape + +xml_escapes = { + "&": "&amp;", + ">": "&gt;", + "<": "&lt;", + '"': "&#34;", # also &quot; in html-only + "'": "&#39;", # also &apos; in html-only +} + + +def xml_escape(string): + return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string) + + +def url_escape(string): + # convert into a list of octets + string = string.encode("utf8") + return quote_plus(string) + + +def trim(string): + return string.strip() + + +class Decode: + def __getattr__(self, key): + def decode(x): + if isinstance(x, str): + return x + elif not isinstance(x, bytes): + return decode(str(x)) + else: + return str(x, encoding=key) + + return decode + + +decode = Decode() + + +class XMLEntityEscaper: + def __init__(self, codepoint2name, name2codepoint): + self.codepoint2entity = { + c: str("&%s;" % n) for c, n in codepoint2name.items() + } + self.name2codepoint = name2codepoint + + def escape_entities(self, text): + """Replace characters with their character entity references. + + Only characters corresponding to a named entity are replaced. + """ + return str(text).translate(self.codepoint2entity) + + def __escape(self, m): + codepoint = ord(m.group()) + try: + return self.codepoint2entity[codepoint] + except (KeyError, IndexError): + return "&#x%X;" % codepoint + + __escapable = re.compile(r'["&<>]|[^\x00-\x7f]') + + def escape(self, text): + """Replace characters with their character references. + + Replace characters by their named entity references. + Non-ASCII characters, if they do not have a named entity reference, + are replaced by numerical character references. + + The return value is guaranteed to be ASCII. + """ + return self.__escapable.sub(self.__escape, str(text)).encode("ascii") + + # XXX: This regexp will not match all valid XML entity names__. + # (It punts on details involving CombiningChars and Extenders.) + # + # .. 
__: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef + __characterrefs = re.compile( + r"""& (?: + \#(\d+) + | \#x([\da-f]+) + | ( (?!\d) [:\w] [-.:\w]+ ) + ) ;""", + re.X | re.UNICODE, + ) + + def __unescape(self, m): + dval, hval, name = m.groups() + if dval: + codepoint = int(dval) + elif hval: + codepoint = int(hval, 16) + else: + codepoint = self.name2codepoint.get(name, 0xFFFD) + # U+FFFD = "REPLACEMENT CHARACTER" + return chr(codepoint) + + def unescape(self, text): + """Unescape character references. + + All character references (both entity references and numerical + character references) are unescaped. + """ + return self.__characterrefs.sub(self.__unescape, text) + + +_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint) + +html_entities_escape = _html_entities_escaper.escape_entities +html_entities_unescape = _html_entities_escaper.unescape + + +def htmlentityreplace_errors(ex): + """An encoding error handler. + + This python codecs error handler replaces unencodable + characters with HTML entities, or, if no HTML entity exists for + the character, XML character references:: + + >>> 'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace') + 'The cost was &euro;12.' + """ + if isinstance(ex, UnicodeEncodeError): + # Handle encoding errors + bad_text = ex.object[ex.start : ex.end] + text = _html_entities_escaper.escape(bad_text) + return (str(text), ex.end) + raise ex + + +codecs.register_error("htmlentityreplace", htmlentityreplace_errors) + + +DEFAULT_ESCAPES = { + "x": "filters.xml_escape", + "h": "filters.html_escape", + "u": "filters.url_escape", + "trim": "filters.trim", + "entity": "filters.html_entities_escape", + "unicode": "str", + "decode": "decode", + "str": "str", + "n": "n", +} diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/lexer.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/lexer.py new file mode 100644 index 0000000000000000000000000000000000000000..37afb4de2991fb281e2eac7d2acfdebc3e53d364 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/lexer.py @@ -0,0 +1,481 @@ +# mako/lexer.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides the Lexer class for parsing template strings into parse trees.""" + +import codecs +import re + +from mako import exceptions +from mako import parsetree +from mako.pygen import adjust_whitespace + +_regexp_cache = {} + + +class Lexer: + def __init__( + self, text, filename=None, input_encoding=None, preprocessor=None + ): + self.text = text + self.filename = filename + self.template = parsetree.TemplateNode(self.filename) + self.matched_lineno = 1 + self.matched_charpos = 0 + self.lineno = 1 + self.match_position = 0 + self.tag = [] + self.control_line = [] + self.ternary_stack = [] + self.encoding = input_encoding + + if preprocessor is None: + self.preprocessor = [] + elif not hasattr(preprocessor, "__iter__"): + self.preprocessor = [preprocessor] + else: + self.preprocessor = preprocessor + + @property + def exception_kwargs(self): + return { + "source": self.text, + "lineno": self.matched_lineno, + "pos": self.matched_charpos, + "filename": self.filename, + } + + def match(self, regexp, flags=None): + """compile the given regexp, cache the reg, and call match_reg().""" + + 
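# note (descriptive comment, not in the vendored file): compiled patterns
# are memoized in the module-level _regexp_cache, keyed on the
# (regexp, flags) pair, so each distinct token regex is compiled at most
# once per process instead of on every match() call.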
try: + reg = _regexp_cache[(regexp, flags)] + except KeyError: + reg = re.compile(regexp, flags) if flags else re.compile(regexp) + _regexp_cache[(regexp, flags)] = reg + + return self.match_reg(reg) + + def match_reg(self, reg): + """match the given regular expression object to the current text + position. + + if a match occurs, update the current text and line position. + + """ + + mp = self.match_position + + match = reg.match(self.text, self.match_position) + if match: + (start, end) = match.span() + self.match_position = end + 1 if end == start else end + self.matched_lineno = self.lineno + cp = mp - 1 + if cp >= 0 and cp < self.textlength: + cp = self.text[: cp + 1].rfind("\n") + self.matched_charpos = mp - cp + self.lineno += self.text[mp : self.match_position].count("\n") + return match + + def parse_until_text(self, watch_nesting, *text): + startpos = self.match_position + text_re = r"|".join(text) + brace_level = 0 + paren_level = 0 + bracket_level = 0 + while True: + match = self.match(r"#.*\n") + if match: + continue + match = self.match( + r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S + ) + if match: + continue + match = self.match(r"(%s)" % text_re) + if match and not ( + watch_nesting + and (brace_level > 0 or paren_level > 0 or bracket_level > 0) + ): + return ( + self.text[ + startpos : self.match_position - len(match.group(1)) + ], + match.group(1), + ) + elif not match: + match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) + if match: + brace_level += match.group(1).count("{") + brace_level -= match.group(1).count("}") + paren_level += match.group(1).count("(") + paren_level -= match.group(1).count(")") + bracket_level += match.group(1).count("[") + bracket_level -= match.group(1).count("]") + continue + raise exceptions.SyntaxException( + "Expected: %s" % ",".join(text), **self.exception_kwargs + ) + + def append_node(self, nodecls, *args, **kwargs): + kwargs.setdefault("source", self.text) + kwargs.setdefault("lineno", self.matched_lineno) + kwargs.setdefault("pos", self.matched_charpos) + kwargs["filename"] = self.filename + node = nodecls(*args, **kwargs) + if len(self.tag): + self.tag[-1].nodes.append(node) + else: + self.template.nodes.append(node) + # build a set of child nodes for the control line + # (used for loop variable detection) + # also build a set of child nodes on ternary control lines + # (used for determining if a pass needs to be auto-inserted + if self.control_line: + control_frame = self.control_line[-1] + control_frame.nodes.append(node) + if ( + not ( + isinstance(node, parsetree.ControlLine) + and control_frame.is_ternary(node.keyword) + ) + and self.ternary_stack + and self.ternary_stack[-1] + ): + self.ternary_stack[-1][-1].nodes.append(node) + if isinstance(node, parsetree.Tag): + if len(self.tag): + node.parent = self.tag[-1] + self.tag.append(node) + elif isinstance(node, parsetree.ControlLine): + if node.isend: + self.control_line.pop() + self.ternary_stack.pop() + elif node.is_primary: + self.control_line.append(node) + self.ternary_stack.append([]) + elif self.control_line and self.control_line[-1].is_ternary( + node.keyword + ): + self.ternary_stack[-1].append(node) + elif self.control_line and not self.control_line[-1].is_ternary( + node.keyword + ): + raise exceptions.SyntaxException( + "Keyword '%s' not a legal ternary for keyword '%s'" + % (node.keyword, self.control_line[-1].keyword), + **self.exception_kwargs, + ) + + _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n") + + def decode_raw_stream(self, text, 
decode_raw, known_encoding, filename): + """given string/unicode or bytes/string, determine encoding + from magic encoding comment, return body as unicode + or raw if decode_raw=False + + """ + if isinstance(text, str): + m = self._coding_re.match(text) + encoding = m and m.group(1) or known_encoding or "utf-8" + return encoding, text + + if text.startswith(codecs.BOM_UTF8): + text = text[len(codecs.BOM_UTF8) :] + parsed_encoding = "utf-8" + m = self._coding_re.match(text.decode("utf-8", "ignore")) + if m is not None and m.group(1) != "utf-8": + raise exceptions.CompileException( + "Found utf-8 BOM in file, with conflicting " + "magic encoding comment of '%s'" % m.group(1), + text.decode("utf-8", "ignore"), + 0, + 0, + filename, + ) + else: + m = self._coding_re.match(text.decode("utf-8", "ignore")) + parsed_encoding = m.group(1) if m else known_encoding or "utf-8" + if decode_raw: + try: + text = text.decode(parsed_encoding) + except UnicodeDecodeError: + raise exceptions.CompileException( + "Unicode decode operation of encoding '%s' failed" + % parsed_encoding, + text.decode("utf-8", "ignore"), + 0, + 0, + filename, + ) + + return parsed_encoding, text + + def parse(self): + self.encoding, self.text = self.decode_raw_stream( + self.text, True, self.encoding, self.filename + ) + + for preproc in self.preprocessor: + self.text = preproc(self.text) + + # push the match marker past the + # encoding comment. + self.match_reg(self._coding_re) + + self.textlength = len(self.text) + + while True: + if self.match_position > self.textlength: + break + + if self.match_end(): + break + if self.match_expression(): + continue + if self.match_control_line(): + continue + if self.match_comment(): + continue + if self.match_tag_start(): + continue + if self.match_tag_end(): + continue + if self.match_python_block(): + continue + if self.match_percent(): + continue + if self.match_text(): + continue + + if self.match_position > self.textlength: + break + # TODO: no coverage here + raise exceptions.MakoException("assertion failed") + + if len(self.tag): + raise exceptions.SyntaxException( + "Unclosed tag: <%%%s>" % self.tag[-1].keyword, + **self.exception_kwargs, + ) + if len(self.control_line): + raise exceptions.SyntaxException( + "Unterminated control keyword: '%s'" + % self.control_line[-1].keyword, + self.text, + self.control_line[-1].lineno, + self.control_line[-1].pos, + self.filename, + ) + return self.template + + def match_tag_start(self): + reg = r""" + \<% # opening tag + + ([\w\.\:]+) # keyword + + ((?:\s+\w+|\s*=\s*|"[^"]*?"|'[^']*?'|\s*,\s*)*) # attrname, = \ + # sign, string expression + # comma is for backwards compat + # identified in #366 + + \s* # more whitespace + + (/)?> # closing + + """ + + match = self.match( + reg, + re.I | re.S | re.X, + ) + + if not match: + return False + + keyword, attr, isend = match.groups() + self.keyword = keyword + attributes = {} + if attr: + for att in re.findall( + r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr + ): + key, val1, val2 = att + text = val1 or val2 + text = text.replace("\r\n", "\n") + attributes[key] = text + self.append_node(parsetree.Tag, keyword, attributes) + if isend: + self.tag.pop() + elif keyword == "text": + match = self.match(r"(.*?)(?=\</%text>)", re.S) + if not match: + raise exceptions.SyntaxException( + "Unclosed tag: <%%%s>" % self.tag[-1].keyword, + **self.exception_kwargs, + ) + self.append_node(parsetree.Text, match.group(1)) + return self.match_tag_end() + return True + + def match_tag_end(self): + match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
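# usage sketch (illustrative addition, not part of the vendored file):
# the lexer can be driven directly to inspect a template's parse tree:
#
#     from mako.lexer import Lexer
#
#     tree = Lexer("hello ${name}!").parse()
#     # tree is a parsetree.TemplateNode; tree.nodes holds the
#     # Text and Expression nodes produced by the matchers above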
+ if match: + if not len(self.tag): + raise exceptions.SyntaxException( + "Closing tag without opening tag: </%%%s>" + % match.group(1), + **self.exception_kwargs, + ) + elif self.tag[-1].keyword != match.group(1): + raise exceptions.SyntaxException( + "Closing tag </%%%s> does not match tag: <%%%s>" + % (match.group(1), self.tag[-1].keyword), + **self.exception_kwargs, + ) + self.tag.pop() + return True + else: + return False + + def match_end(self): + match = self.match(r"\Z", re.S) + if not match: + return False + + string = match.group() + if string: + return string + else: + return True + + def match_percent(self): + match = self.match(r"(?<=^)(\s*)%%(%*)", re.M) + if match: + self.append_node( + parsetree.Text, match.group(1) + "%" + match.group(2) + ) + return True + else: + return False + + def match_text(self): + match = self.match( + r""" + (.*?) # anything, followed by: + ( + (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based + # comment, preceded by a + # consumed newline and whitespace + | + (?=\${) # an expression + | + (?=</?[%&]) # a substitution or block or call start or end + # - don't consume + | + (\\\r?\n) # an escaped newline - throw away + | + \Z # end of string + )""", + re.X | re.S, + ) + + if match: + text = match.group(1) + if text: + self.append_node(parsetree.Text, text) + return True + else: + return False + + def match_python_block(self): + match = self.match(r"<%(!)?") + if match: + line, pos = self.matched_lineno, self.matched_charpos + text, end = self.parse_until_text(False, r"%>") + # the trailing newline helps + # compiler.parse() not complain about indentation + text = adjust_whitespace(text) + "\n" + self.append_node( + parsetree.Code, + text, + match.group(1) == "!", + lineno=line, + pos=pos, + ) + return True + else: + return False + + def match_expression(self): + match = self.match(r"\${") + if not match: + return False + + line, pos = self.matched_lineno, self.matched_charpos + text, end = self.parse_until_text(True, r"\|", r"}") + if end == "|": + escapes, end = self.parse_until_text(True, r"}") + else: + escapes = "" + text = text.replace("\r\n", "\n") + self.append_node( + parsetree.Expression, + text, + escapes.strip(), + lineno=line, + pos=pos, + ) + return True + + def match_control_line(self): + match = self.match( + r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\\r?\n)|[^\r\n])*)" + r"(?:\r?\n|\Z)", + re.M, + ) + if not match: + return False + + operator = match.group(1) + text = match.group(2) + if operator == "%": + m2 = re.match(r"(end)?(\w+)\s*(.*)", text) + if not m2: + raise exceptions.SyntaxException( + "Invalid control line: '%s'" % text, + **self.exception_kwargs, + ) + isend, keyword = m2.group(1, 2) + isend = isend is not None + + if isend: + if not len(self.control_line): + raise exceptions.SyntaxException( + "No starting keyword '%s' for '%s'" % (keyword, text), + **self.exception_kwargs, + ) + elif self.control_line[-1].keyword != keyword: + raise exceptions.SyntaxException( + "Keyword '%s' doesn't match keyword '%s'" + % (text, self.control_line[-1].keyword), + **self.exception_kwargs, + ) + self.append_node(parsetree.ControlLine, keyword, isend, text) + else: + self.append_node(parsetree.Comment, text) + return True + + def match_comment(self): + """matches the multiline version of a comment""" + match = self.match(r"<%doc>(.*?)</%doc>", re.S) + if match: + self.append_node(parsetree.Comment, match.group(1)) + return True + else: + return False diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/lookup.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/lookup.py new file mode 100644 index 0000000000000000000000000000000000000000..1a431c8059bf58680d5d699104c0c5c09ee215b7 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/lookup.py @@ -0,0 +1,361 @@ +# mako/lookup.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: 
http://www.opensource.org/licenses/mit-license.php + +import os +import posixpath +import re +import stat +import threading + +from mako import exceptions +from mako import util +from mako.template import Template + + +class TemplateCollection: + + """Represent a collection of :class:`.Template` objects, + identifiable via URI. + + A :class:`.TemplateCollection` is linked to the usage of + all template tags that address other templates, such + as ``<%include>``, ``<%namespace>``, and ``<%inherit>``. + The ``file`` attribute of each of those tags refers + to a string URI that is passed to that :class:`.Template` + object's :class:`.TemplateCollection` for resolution. + + :class:`.TemplateCollection` is an abstract class, + with the usual default implementation being :class:`.TemplateLookup`. + + """ + + def has_template(self, uri): + """Return ``True`` if this :class:`.TemplateLookup` is + capable of returning a :class:`.Template` object for the + given ``uri``. + + :param uri: String URI of the template to be resolved. + + """ + try: + self.get_template(uri) + return True + except exceptions.TemplateLookupException: + return False + + def get_template(self, uri, relativeto=None): + """Return a :class:`.Template` object corresponding to the given + ``uri``. + + The default implementation raises + :class:`.NotImplementedError`. Implementations should + raise :class:`.TemplateLookupException` if the given ``uri`` + cannot be resolved. + + :param uri: String URI of the template to be resolved. + :param relativeto: if present, the given ``uri`` is assumed to + be relative to this URI. + + """ + raise NotImplementedError() + + def filename_to_uri(self, uri, filename): + """Convert the given ``filename`` to a URI relative to + this :class:`.TemplateCollection`.""" + + return uri + + def adjust_uri(self, uri, filename): + """Adjust the given ``uri`` based on the calling ``filename``. + + When this method is called from the runtime, the + ``filename`` parameter is taken directly to the ``filename`` + attribute of the calling template. Therefore a custom + :class:`.TemplateCollection` subclass can place any string + identifier desired in the ``filename`` parameter of the + :class:`.Template` objects it constructs and have them come back + here. + + """ + return uri + + +class TemplateLookup(TemplateCollection): + + """Represent a collection of templates that locates template source files + from the local filesystem. + + The primary argument is the ``directories`` argument, the list of + directories to search: + + .. sourcecode:: python + + lookup = TemplateLookup(["/path/to/templates"]) + some_template = lookup.get_template("/index.html") + + The :class:`.TemplateLookup` can also be given :class:`.Template` objects + programmatically using :meth:`.put_string` or :meth:`.put_template`: + + .. sourcecode:: python + + lookup = TemplateLookup() + lookup.put_string("base.html", ''' + <html><body>${self.next()}</body></html> + ''') + lookup.put_string("hello.html", ''' + <%include file='base.html'/> + + Hello, world ! + ''') + + + :param directories: A list of directory names which will be + searched for a particular template URI. The URI is appended + to each directory and the filesystem checked. + + :param collection_size: Approximate size of the collection used + to store templates. If left at its default of ``-1``, the size + is unbounded, and a plain Python dictionary is used to + relate URI strings to :class:`.Template` instances.
+ Otherwise, a least-recently-used cache object is used which + will maintain the size of the collection approximately to + the number given. + + :param filesystem_checks: When at its default value of ``True``, + each call to :meth:`.TemplateLookup.get_template()` will + compare the filesystem last modified time to the time in + which an existing :class:`.Template` object was created. + This allows the :class:`.TemplateLookup` to regenerate a + new :class:`.Template` whenever the original source has + been updated. Set this to ``False`` for a very minor + performance increase. + + :param modulename_callable: A callable which, when present, + is passed the path of the source file as well as the + requested URI, and then returns the full path of the + generated Python module file. This is used to inject + alternate schemes for Python module location. If left at + its default of ``None``, the built in system of generation + based on ``module_directory`` plus ``uri`` is used. + + All other keyword parameters available for + :class:`.Template` are mirrored here. When new + :class:`.Template` objects are created, the keywords + established with this :class:`.TemplateLookup` are passed on + to each new :class:`.Template`. + + """ + + def __init__( + self, + directories=None, + module_directory=None, + filesystem_checks=True, + collection_size=-1, + format_exceptions=False, + error_handler=None, + output_encoding=None, + encoding_errors="strict", + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + modulename_callable=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + input_encoding=None, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): + self.directories = [ + posixpath.normpath(d) for d in util.to_list(directories, ()) + ] + self.module_directory = module_directory + self.modulename_callable = modulename_callable + self.filesystem_checks = filesystem_checks + self.collection_size = collection_size + + if cache_args is None: + cache_args = {} + # transfer deprecated cache_* args + if cache_dir: + cache_args.setdefault("dir", cache_dir) + if cache_url: + cache_args.setdefault("url", cache_url) + if cache_type: + cache_args.setdefault("type", cache_type) + + self.template_args = { + "format_exceptions": format_exceptions, + "error_handler": error_handler, + "include_error_handler": include_error_handler, + "output_encoding": output_encoding, + "cache_impl": cache_impl, + "encoding_errors": encoding_errors, + "input_encoding": input_encoding, + "module_directory": module_directory, + "module_writer": module_writer, + "cache_args": cache_args, + "cache_enabled": cache_enabled, + "default_filters": default_filters, + "buffer_filters": buffer_filters, + "strict_undefined": strict_undefined, + "imports": imports, + "future_imports": future_imports, + "enable_loop": enable_loop, + "preprocessor": preprocessor, + "lexer_cls": lexer_cls, + } + + if collection_size == -1: + self._collection = {} + self._uri_cache = {} + else: + self._collection = util.LRUCache(collection_size) + self._uri_cache = util.LRUCache(collection_size) + self._mutex = threading.Lock() + + def get_template(self, uri): + """Return a :class:`.Template` object corresponding to the given + ``uri``. + + .. note:: The ``relativeto`` argument is not supported here at + the moment. 
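A minimal sketch of the usual round trip (the directory path and the
``name`` argument passed to ``render()`` are hypothetical):

.. sourcecode:: python

    lookup = TemplateLookup(directories=["/path/to/templates"])
    template = lookup.get_template("/index.html")
    print(template.render(name="world"))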
+ + """ + + try: + if self.filesystem_checks: + return self._check(uri, self._collection[uri]) + else: + return self._collection[uri] + except KeyError as e: + u = re.sub(r"^\/+", "", uri) + for dir_ in self.directories: + # make sure the path seperators are posix - os.altsep is empty + # on POSIX and cannot be used. + dir_ = dir_.replace(os.path.sep, posixpath.sep) + srcfile = posixpath.normpath(posixpath.join(dir_, u)) + if os.path.isfile(srcfile): + return self._load(srcfile, uri) + else: + raise exceptions.TopLevelLookupException( + "Can't locate template for uri %r" % uri + ) from e + + def adjust_uri(self, uri, relativeto): + """Adjust the given ``uri`` based on the given relative URI.""" + + key = (uri, relativeto) + if key in self._uri_cache: + return self._uri_cache[key] + + if uri[0] == "/": + v = self._uri_cache[key] = uri + elif relativeto is not None: + v = self._uri_cache[key] = posixpath.join( + posixpath.dirname(relativeto), uri + ) + else: + v = self._uri_cache[key] = "/" + uri + return v + + def filename_to_uri(self, filename): + """Convert the given ``filename`` to a URI relative to + this :class:`.TemplateCollection`.""" + + try: + return self._uri_cache[filename] + except KeyError: + value = self._relativeize(filename) + self._uri_cache[filename] = value + return value + + def _relativeize(self, filename): + """Return the portion of a filename that is 'relative' + to the directories in this lookup. + + """ + + filename = posixpath.normpath(filename) + for dir_ in self.directories: + if filename[0 : len(dir_)] == dir_: + return filename[len(dir_) :] + else: + return None + + def _load(self, filename, uri): + self._mutex.acquire() + try: + try: + # try returning from collection one + # more time in case concurrent thread already loaded + return self._collection[uri] + except KeyError: + pass + try: + if self.modulename_callable is not None: + module_filename = self.modulename_callable(filename, uri) + else: + module_filename = None + self._collection[uri] = template = Template( + uri=uri, + filename=posixpath.normpath(filename), + lookup=self, + module_filename=module_filename, + **self.template_args, + ) + return template + except: + # if compilation fails etc, ensure + # template is removed from collection, + # re-raise + self._collection.pop(uri, None) + raise + finally: + self._mutex.release() + + def _check(self, uri, template): + if template.filename is None: + return template + + try: + template_stat = os.stat(template.filename) + if template.module._modified_time >= template_stat[stat.ST_MTIME]: + return template + self._collection.pop(uri, None) + return self._load(template.filename, uri) + except OSError as e: + self._collection.pop(uri, None) + raise exceptions.TemplateLookupException( + "Can't locate template for uri %r" % uri + ) from e + + def put_string(self, uri, text): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given string of + ``text``. + + """ + self._collection[uri] = Template( + text, lookup=self, uri=uri, **self.template_args + ) + + def put_template(self, uri, template): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given + :class:`.Template` object. 
+ + """ + self._collection[uri] = template diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/parsetree.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/parsetree.py new file mode 100644 index 0000000000000000000000000000000000000000..f7bdda8d670624401d0a3dc78faf596e170b828b --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/parsetree.py @@ -0,0 +1,656 @@ +# mako/parsetree.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""defines the parse tree components for Mako templates.""" + +import re + +from mako import ast +from mako import exceptions +from mako import filters +from mako import util + + +class Node: + + """base class for a Node in the parse tree.""" + + def __init__(self, source, lineno, pos, filename): + self.source = source + self.lineno = lineno + self.pos = pos + self.filename = filename + + @property + def exception_kwargs(self): + return { + "source": self.source, + "lineno": self.lineno, + "pos": self.pos, + "filename": self.filename, + } + + def get_children(self): + return [] + + def accept_visitor(self, visitor): + def traverse(node): + for n in node.get_children(): + n.accept_visitor(visitor) + + method = getattr(visitor, "visit" + self.__class__.__name__, traverse) + method(self) + + +class TemplateNode(Node): + + """a 'container' node that stores the overall collection of nodes.""" + + def __init__(self, filename): + super().__init__("", 0, 0, filename) + self.nodes = [] + self.page_attributes = {} + + def get_children(self): + return self.nodes + + def __repr__(self): + return "TemplateNode(%s, %r)" % ( + util.sorted_dict_repr(self.page_attributes), + self.nodes, + ) + + +class ControlLine(Node): + + """defines a control line, a line-oriented python line or end tag. + + e.g.:: + + % if foo: + (markup) + % endif + + """ + + has_loop_context = False + + def __init__(self, keyword, isend, text, **kwargs): + super().__init__(**kwargs) + self.text = text + self.keyword = keyword + self.isend = isend + self.is_primary = keyword in ["for", "if", "while", "try", "with"] + self.nodes = [] + if self.isend: + self._declared_identifiers = [] + self._undeclared_identifiers = [] + else: + code = ast.PythonFragment(text, **self.exception_kwargs) + self._declared_identifiers = code.declared_identifiers + self._undeclared_identifiers = code.undeclared_identifiers + + def get_children(self): + return self.nodes + + def declared_identifiers(self): + return self._declared_identifiers + + def undeclared_identifiers(self): + return self._undeclared_identifiers + + def is_ternary(self, keyword): + """return true if the given keyword is a ternary keyword + for this ControlLine""" + + cases = { + "if": {"else", "elif"}, + "try": {"except", "finally"}, + "for": {"else"}, + } + + return keyword in cases.get(self.keyword, set()) + + def __repr__(self): + return "ControlLine(%r, %r, %r, %r)" % ( + self.keyword, + self.text, + self.isend, + (self.lineno, self.pos), + ) + + +class Text(Node): + """defines plain text in the template.""" + + def __init__(self, content, **kwargs): + super().__init__(**kwargs) + self.content = content + + def __repr__(self): + return "Text(%r, %r)" % (self.content, (self.lineno, self.pos)) + + +class Code(Node): + """defines a Python code block, either inline or module level. 
+ + e.g.:: + + inline: + <% + x = 12 + %> + + module level: + <%! + import logger + %> + + """ + + def __init__(self, text, ismodule, **kwargs): + super().__init__(**kwargs) + self.text = text + self.ismodule = ismodule + self.code = ast.PythonCode(text, **self.exception_kwargs) + + def declared_identifiers(self): + return self.code.declared_identifiers + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers + + def __repr__(self): + return "Code(%r, %r, %r)" % ( + self.text, + self.ismodule, + (self.lineno, self.pos), + ) + + +class Comment(Node): + """defines a comment line. + + # this is a comment + + """ + + def __init__(self, text, **kwargs): + super().__init__(**kwargs) + self.text = text + + def __repr__(self): + return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos)) + + +class Expression(Node): + """defines an inline expression. + + ${x+y} + + """ + + def __init__(self, text, escapes, **kwargs): + super().__init__(**kwargs) + self.text = text + self.escapes = escapes + self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs) + self.code = ast.PythonCode(text, **self.exception_kwargs) + + def declared_identifiers(self): + return [] + + def undeclared_identifiers(self): + # TODO: make the "filter" shortcut list configurable at parse/gen time + return self.code.undeclared_identifiers.union( + self.escapes_code.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES + ) + ).difference(self.code.declared_identifiers) + + def __repr__(self): + return "Expression(%r, %r, %r)" % ( + self.text, + self.escapes_code.args, + (self.lineno, self.pos), + ) + + +class _TagMeta(type): + """metaclass to allow Tag to produce a subclass according to + its keyword""" + + _classmap = {} + + def __init__(cls, clsname, bases, dict_): + if getattr(cls, "__keyword__", None) is not None: + cls._classmap[cls.__keyword__] = cls + super().__init__(clsname, bases, dict_) + + def __call__(cls, keyword, attributes, **kwargs): + if ":" in keyword: + ns, defname = keyword.split(":") + return type.__call__( + CallNamespaceTag, ns, defname, attributes, **kwargs + ) + + try: + cls = _TagMeta._classmap[keyword] + except KeyError: + raise exceptions.CompileException( + "No such tag: '%s'" % keyword, + source=kwargs["source"], + lineno=kwargs["lineno"], + pos=kwargs["pos"], + filename=kwargs["filename"], + ) + return type.__call__(cls, keyword, attributes, **kwargs) + + +class Tag(Node, metaclass=_TagMeta): + """abstract base class for tags. + + e.g.:: + + <%sometag/> + + <%someothertag> + stuff + + + """ + + __keyword__ = None + + def __init__( + self, + keyword, + attributes, + expressions, + nonexpressions, + required, + **kwargs, + ): + r"""construct a new Tag instance. + + this constructor not called directly, and is only called + by subclasses. 
+ + :param keyword: the tag keyword + + :param attributes: raw dictionary of attribute key/value pairs + + :param expressions: a set of identifiers that are legal attributes, + which can also contain embedded expressions + + :param nonexpressions: a set of identifiers that are legal + attributes, which cannot contain embedded expressions + + :param \**kwargs: + other arguments passed to the Node superclass (lineno, pos) + + """ + super().__init__(**kwargs) + self.keyword = keyword + self.attributes = attributes + self._parse_attributes(expressions, nonexpressions) + missing = [r for r in required if r not in self.parsed_attributes] + if len(missing): + raise exceptions.CompileException( + ( + "Missing attribute(s): %s" + % ",".join(repr(m) for m in missing) + ), + **self.exception_kwargs, + ) + + self.parent = None + self.nodes = [] + + def is_root(self): + return self.parent is None + + def get_children(self): + return self.nodes + + def _parse_attributes(self, expressions, nonexpressions): + undeclared_identifiers = set() + self.parsed_attributes = {} + for key in self.attributes: + if key in expressions: + expr = [] + for x in re.compile(r"(\${(?:[^$]*?{.+|.+?)})", re.S).split( + self.attributes[key] + ): + m = re.compile(r"^\${(.+?)}$", re.S).match(x) + if m: + code = ast.PythonCode( + m.group(1).rstrip(), **self.exception_kwargs + ) + # we aren't discarding "declared_identifiers" here, + # which we do so that list comprehension-declared + # variables aren't counted. As yet can't find a + # condition that requires it here. + undeclared_identifiers = undeclared_identifiers.union( + code.undeclared_identifiers + ) + expr.append("(%s)" % m.group(1)) + elif x: + expr.append(repr(x)) + self.parsed_attributes[key] = " + ".join(expr) or repr("") + elif key in nonexpressions: + if re.search(r"\${.+?}", self.attributes[key]): + raise exceptions.CompileException( + "Attribute '%s' in tag '%s' does not allow embedded " + "expressions" % (key, self.keyword), + **self.exception_kwargs, + ) + self.parsed_attributes[key] = repr(self.attributes[key]) + else: + raise exceptions.CompileException( + "Invalid attribute for tag '%s': '%s'" + % (self.keyword, key), + **self.exception_kwargs, + ) + self.expression_undeclared_identifiers = undeclared_identifiers + + def declared_identifiers(self): + return [] + + def undeclared_identifiers(self): + return self.expression_undeclared_identifiers + + def __repr__(self): + return "%s(%r, %s, %r, %r)" % ( + self.__class__.__name__, + self.keyword, + util.sorted_dict_repr(self.attributes), + (self.lineno, self.pos), + self.nodes, + ) + + +class IncludeTag(Tag): + __keyword__ = "include" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, + attributes, + ("file", "import", "args"), + (), + ("file",), + **kwargs, + ) + self.page_args = ast.PythonCode( + "__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return [] + + def undeclared_identifiers(self): + identifiers = self.page_args.undeclared_identifiers.difference( + {"__DUMMY"} + ).difference(self.page_args.declared_identifiers) + return identifiers.union(super().undeclared_identifiers()) + + +class NamespaceTag(Tag): + __keyword__ = "namespace" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, + attributes, + ("file",), + ("name", "inheritable", "import", "module"), + (), + **kwargs, + ) + + self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self)))) + if "name" not in attributes 
and "import" not in attributes: + raise exceptions.CompileException( + "'name' and/or 'import' attributes are required " + "for <%namespace>", + **self.exception_kwargs, + ) + if "file" in attributes and "module" in attributes: + raise exceptions.CompileException( + "<%namespace> may only have one of 'file' or 'module'", + **self.exception_kwargs, + ) + + def declared_identifiers(self): + return [] + + +class TextTag(Tag): + __keyword__ = "text" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__(keyword, attributes, (), ("filter"), (), **kwargs) + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + def undeclared_identifiers(self): + return self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ).union(self.expression_undeclared_identifiers) + + +class DefTag(Tag): + __keyword__ = "def" + + def __init__(self, keyword, attributes, **kwargs): + expressions = ["buffered", "cached"] + [ + c for c in attributes if c.startswith("cache_") + ] + + super().__init__( + keyword, + attributes, + expressions, + ("name", "filter", "decorator"), + ("name",), + **kwargs, + ) + name = attributes["name"] + if re.match(r"^[\w_]+$", name): + raise exceptions.CompileException( + "Missing parenthesis in %def", **self.exception_kwargs + ) + self.function_decl = ast.FunctionDecl( + "def " + name + ":pass", **self.exception_kwargs + ) + self.name = self.function_decl.funcname + self.decorator = attributes.get("decorator", "") + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + is_anonymous = False + is_block = False + + @property + def funcname(self): + return self.function_decl.funcname + + def get_argument_expressions(self, **kw): + return self.function_decl.get_argument_expressions(**kw) + + def declared_identifiers(self): + return self.function_decl.allargnames + + def undeclared_identifiers(self): + res = [] + for c in self.function_decl.defaults: + res += list( + ast.PythonCode( + c, **self.exception_kwargs + ).undeclared_identifiers + ) + return ( + set(res) + .union( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ) + .union(self.expression_undeclared_identifiers) + .difference(self.function_decl.allargnames) + ) + + +class BlockTag(Tag): + __keyword__ = "block" + + def __init__(self, keyword, attributes, **kwargs): + expressions = ["buffered", "cached", "args"] + [ + c for c in attributes if c.startswith("cache_") + ] + + super().__init__( + keyword, + attributes, + expressions, + ("name", "filter", "decorator"), + (), + **kwargs, + ) + name = attributes.get("name") + if name and not re.match(r"^[\w_]+$", name): + raise exceptions.CompileException( + "%block may not specify an argument signature", + **self.exception_kwargs, + ) + if not name and attributes.get("args", None): + raise exceptions.CompileException( + "Only named %blocks may specify args", **self.exception_kwargs + ) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + self.name = name + self.decorator = attributes.get("decorator", "") + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + is_block = True + + @property + def is_anonymous(self): + return self.name is None + + @property + def funcname(self): + return self.name or "__M_anon_%d" % (self.lineno,) + + def get_argument_expressions(self, **kw): + return 
self.body_decl.get_argument_expressions(**kw) + + def declared_identifiers(self): + return self.body_decl.allargnames + + def undeclared_identifiers(self): + return ( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ).union(self.expression_undeclared_identifiers) + + +class CallTag(Tag): + __keyword__ = "call" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs + ) + self.expression = attributes["expr"] + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) + + +class CallNamespaceTag(Tag): + def __init__(self, namespace, defname, attributes, **kwargs): + super().__init__( + namespace + ":" + defname, + attributes, + tuple(attributes.keys()) + ("args",), + (), + (), + **kwargs, + ) + + self.expression = "%s.%s(%s)" % ( + namespace, + defname, + ",".join( + "%s=%s" % (k, v) + for k, v in self.parsed_attributes.items() + if k != "args" + ), + ) + + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) + + +class InheritTag(Tag): + __keyword__ = "inherit" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, attributes, ("file",), (), ("file",), **kwargs + ) + + +class PageTag(Tag): + __keyword__ = "page" + + def __init__(self, keyword, attributes, **kwargs): + expressions = [ + "cached", + "args", + "expression_filter", + "enable_loop", + ] + [c for c in attributes if c.startswith("cache_")] + + super().__init__(keyword, attributes, expressions, (), (), **kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + self.filter_args = ast.ArgumentList( + attributes.get("expression_filter", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.body_decl.allargnames diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/pygen.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/pygen.py new file mode 100644 index 0000000000000000000000000000000000000000..41f9afdaa57799b886f47e2d9c2883ca8e691859 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/pygen.py @@ -0,0 +1,309 @@ +# mako/pygen.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""utilities for generating and formatting literal Python code.""" + +import re + +from mako import exceptions + + +class PythonPrinter: + def __init__(self, stream): + # indentation counter + self.indent = 0 + + # a stack storing information about why we incremented + # the indentation counter, to help us determine if we + # should decrement it + self.indent_detail = [] + + # the 
string of whitespace multiplied by the indent + # counter to produce a line + self.indentstring = " " + + # the stream we are writing to + self.stream = stream + + # current line number + self.lineno = 1 + + # a list of lines that represents a buffered "block" of code, + # which can be later printed relative to an indent level + self.line_buffer = [] + + self.in_indent_lines = False + + self._reset_multi_line_flags() + + # mapping of generated python lines to template + # source lines + self.source_map = {} + + self._re_space_comment = re.compile(r"^\s*#") + self._re_space = re.compile(r"^\s*$") + self._re_indent = re.compile(r":[ \t]*(?:#.*)?$") + self._re_compound = re.compile(r"^\s*(if|try|elif|while|for|with)") + self._re_indent_keyword = re.compile( + r"^\s*(def|class|else|elif|except|finally)" + ) + self._re_unindentor = re.compile(r"^\s*(else|elif|except|finally).*\:") + + def _update_lineno(self, num): + self.lineno += num + + def start_source(self, lineno): + if self.lineno not in self.source_map: + self.source_map[self.lineno] = lineno + + def write_blanks(self, num): + self.stream.write("\n" * num) + self._update_lineno(num) + + def write_indented_block(self, block, starting_lineno=None): + """print a line or lines of python which already contain indentation. + + The indentation of the total block of lines will be adjusted to that of + the current indent level.""" + self.in_indent_lines = False + for i, l in enumerate(re.split(r"\r?\n", block)): + self.line_buffer.append(l) + if starting_lineno is not None: + self.start_source(starting_lineno + i) + self._update_lineno(1) + + def writelines(self, *lines): + """print a series of lines of python.""" + for line in lines: + self.writeline(line) + + def writeline(self, line): + """print a line of python, indenting it according to the current + indent level. + + this also adjusts the indentation counter according to the + content of the line. + + """ + + if not self.in_indent_lines: + self._flush_adjusted_lines() + self.in_indent_lines = True + + if ( + line is None + or self._re_space_comment.match(line) + or self._re_space.match(line) + ): + hastext = False + else: + hastext = True + + is_comment = line and len(line) and line[0] == "#" + + # see if this line should decrease the indentation level + if ( + not is_comment + and (not hastext or self._is_unindentor(line)) + and self.indent > 0 + ): + self.indent -= 1 + # if the indent_detail stack is empty, the user + # probably put extra closures - the resulting + # module wont compile. + if len(self.indent_detail) == 0: + # TODO: no coverage here + raise exceptions.MakoException("Too many whitespace closures") + self.indent_detail.pop() + + if line is None: + return + + # write the line + self.stream.write(self._indent_line(line) + "\n") + self._update_lineno(len(line.split("\n"))) + + # see if this line should increase the indentation level. + # note that a line can both decrase (before printing) and + # then increase (after printing) the indentation level. + + if self._re_indent.search(line): + # increment indentation count, and also + # keep track of what the keyword was that indented us, + # if it is a python compound statement keyword + # where we might have to look for an "unindent" keyword + match = self._re_compound.match(line) + if match: + # its a "compound" keyword, so we will check for "unindentors" + indentor = match.group(1) + self.indent += 1 + self.indent_detail.append(indentor) + else: + indentor = None + # its not a "compound" keyword. 
but lets also + # test for valid Python keywords that might be indenting us, + # else assume its a non-indenting line + m2 = self._re_indent_keyword.match(line) + if m2: + self.indent += 1 + self.indent_detail.append(indentor) + + def close(self): + """close this printer, flushing any remaining lines.""" + self._flush_adjusted_lines() + + def _is_unindentor(self, line): + """return true if the given line is an 'unindentor', + relative to the last 'indent' event received. + + """ + + # no indentation detail has been pushed on; return False + if len(self.indent_detail) == 0: + return False + + indentor = self.indent_detail[-1] + + # the last indent keyword we grabbed is not a + # compound statement keyword; return False + if indentor is None: + return False + + # if the current line doesnt have one of the "unindentor" keywords, + # return False + match = self._re_unindentor.match(line) + # if True, whitespace matches up, we have a compound indentor, + # and this line has an unindentor, this + # is probably good enough + return bool(match) + + # should we decide that its not good enough, heres + # more stuff to check. + # keyword = match.group(1) + + # match the original indent keyword + # for crit in [ + # (r'if|elif', r'else|elif'), + # (r'try', r'except|finally|else'), + # (r'while|for', r'else'), + # ]: + # if re.match(crit[0], indentor) and re.match(crit[1], keyword): + # return True + + # return False + + def _indent_line(self, line, stripspace=""): + """indent the given line according to the current indent level. + + stripspace is a string of space that will be truncated from the + start of the line before indenting.""" + if stripspace == "": + # Fast path optimization. + return self.indentstring * self.indent + line + + return re.sub( + r"^%s" % stripspace, self.indentstring * self.indent, line + ) + + def _reset_multi_line_flags(self): + """reset the flags which would indicate we are in a backslashed + or triple-quoted section.""" + + self.backslashed, self.triplequoted = False, False + + def _in_multi_line(self, line): + """return true if the given line is part of a multi-line block, + via backslash or triple-quote.""" + + # we are only looking for explicitly joined lines here, not + # implicit ones (i.e. brackets, braces etc.). 
this is just to + # guard against the possibility of modifying the space inside of + # a literal multiline string with unfortunately placed + # whitespace + + current_state = self.backslashed or self.triplequoted + + self.backslashed = bool(re.search(r"\\$", line)) + triples = len(re.findall(r"\"\"\"|\'\'\'", line)) + if triples == 1 or triples % 2 != 0: + self.triplequoted = not self.triplequoted + + return current_state + + def _flush_adjusted_lines(self): + stripspace = None + self._reset_multi_line_flags() + + for entry in self.line_buffer: + if self._in_multi_line(entry): + self.stream.write(entry + "\n") + else: + entry = entry.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry): + stripspace = re.match(r"^([ \t]*)", entry).group(1) + self.stream.write(self._indent_line(entry, stripspace) + "\n") + + self.line_buffer = [] + self._reset_multi_line_flags() + + +def adjust_whitespace(text): + """remove the left-whitespace margin of a block of Python code.""" + + state = [False, False] + (backslashed, triplequoted) = (0, 1) + + def in_multi_line(line): + start_state = state[backslashed] or state[triplequoted] + + if re.search(r"\\$", line): + state[backslashed] = True + else: + state[backslashed] = False + + def match(reg, t): + m = re.match(reg, t) + if m: + return m, t[len(m.group(0)) :] + else: + return None, t + + while line: + if state[triplequoted]: + m, line = match(r"%s" % state[triplequoted], line) + if m: + state[triplequoted] = False + else: + m, line = match(r".*?(?=%s|$)" % state[triplequoted], line) + else: + m, line = match(r"#", line) + if m: + return start_state + + m, line = match(r"\"\"\"|\'\'\'", line) + if m: + state[triplequoted] = m.group(0) + continue + + m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line) + + return start_state + + def _indent_line(line, stripspace=""): + return re.sub(r"^%s" % stripspace, "", line) + + lines = [] + stripspace = None + + for line in re.split(r"\r?\n", text): + if in_multi_line(line): + lines.append(line) + else: + line = line.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", line): + stripspace = re.match(r"^([ \t]*)", line).group(1) + lines.append(_indent_line(line, stripspace)) + return "\n".join(lines) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/pyparser.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/pyparser.py new file mode 100644 index 0000000000000000000000000000000000000000..1da9ddb5548d06b7a47ab2b4fe5ac3bdaaf5a63b --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/pyparser.py @@ -0,0 +1,235 @@ +# mako/pyparser.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Handles parsing of Python code. + +Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler +module is used. 
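As a rough illustration of the helpers below (a sketch, not an official
API contract):

.. sourcecode:: python

    from mako import pyparser

    node = pyparser.parse("x + y", "exec")
    src = pyparser.ExpressionGenerator(node.body[0].value).value()
    # src is the regenerated source of the expression, e.g. "x + y"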
+""" + +import operator + +import _ast + +from mako import _ast_util +from mako import compat +from mako import exceptions +from mako import util + +# words that cannot be assigned to (notably +# smaller than the total keys in __builtins__) +reserved = {"True", "False", "None", "print"} + +# the "id" attribute on a function node +arg_id = operator.attrgetter("arg") + +util.restore__ast(_ast) + + +def parse(code, mode="exec", **exception_kwargs): + """Parse an expression into AST""" + + try: + return _ast_util.parse(code, "", mode) + except Exception as e: + raise exceptions.SyntaxException( + "(%s) %s (%r)" + % ( + compat.exception_as().__class__.__name__, + compat.exception_as(), + code[0:50], + ), + **exception_kwargs, + ) from e + + +class FindIdentifiers(_ast_util.NodeVisitor): + def __init__(self, listener, **exception_kwargs): + self.in_function = False + self.in_assign_targets = False + self.local_ident_stack = set() + self.listener = listener + self.exception_kwargs = exception_kwargs + + def _add_declared(self, name): + if not self.in_function: + self.listener.declared_identifiers.add(name) + else: + self.local_ident_stack.add(name) + + def visit_ClassDef(self, node): + self._add_declared(node.name) + + def visit_Assign(self, node): + # flip around the visiting of Assign so the expression gets + # evaluated first, in the case of a clause like "x=x+5" (x + # is undeclared) + + self.visit(node.value) + in_a = self.in_assign_targets + self.in_assign_targets = True + for n in node.targets: + self.visit(n) + self.in_assign_targets = in_a + + def visit_ExceptHandler(self, node): + if node.name is not None: + self._add_declared(node.name) + if node.type is not None: + self.visit(node.type) + for statement in node.body: + self.visit(statement) + + def visit_Lambda(self, node, *args): + self._visit_function(node, True) + + def visit_FunctionDef(self, node): + self._add_declared(node.name) + self._visit_function(node, False) + + def visit_ListComp(self, node): + if self.in_function: + for comp in node.generators: + self.visit(comp.target) + self.visit(comp.iter) + else: + self.generic_visit(node) + + visit_SetComp = visit_GeneratorExp = visit_ListComp + + def visit_DictComp(self, node): + if self.in_function: + for comp in node.generators: + self.visit(comp.target) + self.visit(comp.iter) + else: + self.generic_visit(node) + + def _expand_tuples(self, args): + for arg in args: + if isinstance(arg, _ast.Tuple): + yield from arg.elts + else: + yield arg + + def _visit_function(self, node, islambda): + # push function state onto stack. dont log any more + # identifiers as "declared" until outside of the function, + # but keep logging identifiers as "undeclared". 
track + # argument names in each function header so they arent + # counted as "undeclared" + + inf = self.in_function + self.in_function = True + + local_ident_stack = self.local_ident_stack + self.local_ident_stack = local_ident_stack.union( + [arg_id(arg) for arg in self._expand_tuples(node.args.args)] + ) + if islambda: + self.visit(node.body) + else: + for n in node.body: + self.visit(n) + self.in_function = inf + self.local_ident_stack = local_ident_stack + + def visit_For(self, node): + # flip around visit + + self.visit(node.iter) + self.visit(node.target) + for statement in node.body: + self.visit(statement) + for statement in node.orelse: + self.visit(statement) + + def visit_Name(self, node): + if isinstance(node.ctx, _ast.Store): + # this is eqiuvalent to visit_AssName in + # compiler + self._add_declared(node.id) + elif ( + node.id not in reserved + and node.id not in self.listener.declared_identifiers + and node.id not in self.local_ident_stack + ): + self.listener.undeclared_identifiers.add(node.id) + + def visit_Import(self, node): + for name in node.names: + if name.asname is not None: + self._add_declared(name.asname) + else: + self._add_declared(name.name.split(".")[0]) + + def visit_ImportFrom(self, node): + for name in node.names: + if name.asname is not None: + self._add_declared(name.asname) + elif name.name == "*": + raise exceptions.CompileException( + "'import *' is not supported, since all identifier " + "names must be explicitly declared. Please use the " + "form 'from import , , " + "...' instead.", + **self.exception_kwargs, + ) + else: + self._add_declared(name.name) + + +class FindTuple(_ast_util.NodeVisitor): + def __init__(self, listener, code_factory, **exception_kwargs): + self.listener = listener + self.exception_kwargs = exception_kwargs + self.code_factory = code_factory + + def visit_Tuple(self, node): + for n in node.elts: + p = self.code_factory(n, **self.exception_kwargs) + self.listener.codeargs.append(p) + self.listener.args.append(ExpressionGenerator(n).value()) + ldi = self.listener.declared_identifiers + self.listener.declared_identifiers = ldi.union( + p.declared_identifiers + ) + lui = self.listener.undeclared_identifiers + self.listener.undeclared_identifiers = lui.union( + p.undeclared_identifiers + ) + + +class ParseFunc(_ast_util.NodeVisitor): + def __init__(self, listener, **exception_kwargs): + self.listener = listener + self.exception_kwargs = exception_kwargs + + def visit_FunctionDef(self, node): + self.listener.funcname = node.name + + argnames = [arg_id(arg) for arg in node.args.args] + if node.args.vararg: + argnames.append(node.args.vararg.arg) + + kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs] + if node.args.kwarg: + kwargnames.append(node.args.kwarg.arg) + self.listener.argnames = argnames + self.listener.defaults = node.args.defaults # ast + self.listener.kwargnames = kwargnames + self.listener.kwdefaults = node.args.kw_defaults + self.listener.varargs = node.args.vararg + self.listener.kwargs = node.args.kwarg + + +class ExpressionGenerator: + def __init__(self, astnode): + self.generator = _ast_util.SourceGenerator(" " * 4) + self.generator.visit(astnode) + + def value(self): + return "".join(self.generator.result) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/runtime.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/runtime.py new file mode 100644 index 
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/runtime.py
@@ -0,0 +1,968 @@
+# mako/runtime.py
+# Copyright 2006-2020 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides runtime services for templates, including Context,
+Namespace, and various helper functions."""
+
+import builtins
+import functools
+import sys
+
+from mako import compat
+from mako import exceptions
+from mako import util
+
+
+class Context:
+
+    """Provides runtime namespace, output buffer, and various
+    callstacks for templates.
+
+    See :ref:`runtime_toplevel` for detail on the usage of
+    :class:`.Context`.
+
+    """
+
+    def __init__(self, buffer, **data):
+        self._buffer_stack = [buffer]
+
+        self._data = data
+
+        self._kwargs = data.copy()
+        self._with_template = None
+        self._outputting_as_unicode = None
+        self.namespaces = {}
+
+        # "capture" function which proxies to the
+        # generic "capture" function
+        self._data["capture"] = functools.partial(capture, self)
+
+        # "caller" stack used by def calls with content
+        self.caller_stack = self._data["caller"] = CallerStack()
+
+    def _set_with_template(self, t):
+        self._with_template = t
+        illegal_names = t.reserved_names.intersection(self._data)
+        if illegal_names:
+            raise exceptions.NameConflictError(
+                "Reserved words passed to render(): %s"
+                % ", ".join(illegal_names)
+            )
+
+    @property
+    def lookup(self):
+        """Return the :class:`.TemplateLookup` associated
+        with this :class:`.Context`.
+
+        """
+        return self._with_template.lookup
+
+    @property
+    def kwargs(self):
+        """Return the dictionary of top level keyword arguments associated
+        with this :class:`.Context`.
+
+        This dictionary only includes the top-level arguments passed to
+        :meth:`.Template.render`. It does not include names produced within
+        the template execution such as local variable names or special names
+        such as ``self``, ``next``, etc.
+
+        The purpose of this dictionary is primarily for the case that
+        a :class:`.Template` accepts arguments via its ``<%page>`` tag,
+        which are normally expected to be passed via :meth:`.Template.render`,
+        except the template is being called in an inheritance context,
+        using the ``body()`` method. :attr:`.Context.kwargs` can then be
+        used to propagate these arguments to the inheriting template::
+
+            ${next.body(**context.kwargs)}
+
+        """
+        return self._kwargs.copy()
+
+    def push_caller(self, caller):
+        """Push a ``caller`` callable onto the callstack for
+        this :class:`.Context`."""
+
+        self.caller_stack.append(caller)
+
+    def pop_caller(self):
+        """Pop a ``caller`` callable off the callstack of this
+        :class:`.Context`."""
+
+        del self.caller_stack[-1]
+
+    def keys(self):
+        """Return a list of all names established in this :class:`.Context`."""
+
+        return list(self._data.keys())
+
+    def __getitem__(self, key):
+        if key in self._data:
+            return self._data[key]
+        else:
+            return builtins.__dict__[key]
+
+    def _push_writer(self):
+        """push a capturing buffer onto this Context and return
+        the new writer function."""
+
+        buf = util.FastEncodingBuffer()
+        self._buffer_stack.append(buf)
+        return buf.write
+
+    def _pop_buffer_and_writer(self):
+        """pop the most recent capturing buffer from this Context
+        and return the current writer after the pop.
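+
+        An illustrative sketch of the push/pop pairing (not upstream
+        documentation; ``context`` here is assumed to be an active
+        :class:`.Context`):
+
+        .. sourcecode:: python
+
+            write = context._push_writer()
+            write("some captured text")
+            buf, write = context._pop_buffer_and_writer()
+            captured = buf.getvalue()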
+ + """ + + buf = self._buffer_stack.pop() + return buf, self._buffer_stack[-1].write + + def _push_buffer(self): + """push a capturing buffer onto this Context.""" + + self._push_writer() + + def _pop_buffer(self): + """pop the most recent capturing buffer from this Context.""" + + return self._buffer_stack.pop() + + def get(self, key, default=None): + """Return a value from this :class:`.Context`.""" + + return self._data.get(key, builtins.__dict__.get(key, default)) + + def write(self, string): + """Write a string to this :class:`.Context` object's + underlying output buffer.""" + + self._buffer_stack[-1].write(string) + + def writer(self): + """Return the current writer function.""" + + return self._buffer_stack[-1].write + + def _copy(self): + c = Context.__new__(Context) + c._buffer_stack = self._buffer_stack + c._data = self._data.copy() + c._kwargs = self._kwargs + c._with_template = self._with_template + c._outputting_as_unicode = self._outputting_as_unicode + c.namespaces = self.namespaces + c.caller_stack = self.caller_stack + return c + + def _locals(self, d): + """Create a new :class:`.Context` with a copy of this + :class:`.Context`'s current state, + updated with the given dictionary. + + The :attr:`.Context.kwargs` collection remains + unaffected. + + + """ + + if not d: + return self + c = self._copy() + c._data.update(d) + return c + + def _clean_inheritance_tokens(self): + """create a new copy of this :class:`.Context`. with + tokens related to inheritance state removed.""" + + c = self._copy() + x = c._data + x.pop("self", None) + x.pop("parent", None) + x.pop("next", None) + return c + + +class CallerStack(list): + def __init__(self): + self.nextcaller = None + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + return len(self) and self._get_caller() and True or False + + def _get_caller(self): + # this method can be removed once + # codegen MAGIC_NUMBER moves past 7 + return self[-1] + + def __getattr__(self, key): + return getattr(self._get_caller(), key) + + def _push_frame(self): + frame = self.nextcaller or None + self.append(frame) + self.nextcaller = None + return frame + + def _pop_frame(self): + self.nextcaller = self.pop() + + +class Undefined: + + """Represents an undefined value in a template. + + All template modules have a constant value + ``UNDEFINED`` present which is an instance of this + object. + + """ + + def __str__(self): + raise NameError("Undefined") + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + return False + + +UNDEFINED = Undefined() +STOP_RENDERING = "" + + +class LoopStack: + + """a stack for LoopContexts that implements the context manager protocol + to automatically pop off the top of the stack on context exit + """ + + def __init__(self): + self.stack = [] + + def _enter(self, iterable): + self._push(iterable) + return self._top + + def _exit(self): + self._pop() + return self._top + + @property + def _top(self): + if self.stack: + return self.stack[-1] + else: + return self + + def _pop(self): + return self.stack.pop() + + def _push(self, iterable): + new = LoopContext(iterable) + if self.stack: + new.parent = self.stack[-1] + return self.stack.append(new) + + def __getattr__(self, key): + raise exceptions.RuntimeException("No loop context is established") + + def __iter__(self): + return iter(self._top) + + +class LoopContext: + + """A magic loop variable. + Automatically accessible in any ``% for`` block. + + See the section :ref:`loop_context` for usage + notes. 
+ + :attr:`parent` -> :class:`.LoopContext` or ``None`` + The parent loop, if one exists. + :attr:`index` -> `int` + The 0-based iteration count. + :attr:`reverse_index` -> `int` + The number of iterations remaining. + :attr:`first` -> `bool` + ``True`` on the first iteration, ``False`` otherwise. + :attr:`last` -> `bool` + ``True`` on the last iteration, ``False`` otherwise. + :attr:`even` -> `bool` + ``True`` when ``index`` is even. + :attr:`odd` -> `bool` + ``True`` when ``index`` is odd. + """ + + def __init__(self, iterable): + self._iterable = iterable + self.index = 0 + self.parent = None + + def __iter__(self): + for i in self._iterable: + yield i + self.index += 1 + + @util.memoized_instancemethod + def __len__(self): + return len(self._iterable) + + @property + def reverse_index(self): + return len(self) - self.index - 1 + + @property + def first(self): + return self.index == 0 + + @property + def last(self): + return self.index == len(self) - 1 + + @property + def even(self): + return not self.odd + + @property + def odd(self): + return bool(self.index % 2) + + def cycle(self, *values): + """Cycle through values as the loop progresses.""" + if not values: + raise ValueError("You must provide values to cycle through") + return values[self.index % len(values)] + + +class _NSAttr: + def __init__(self, parent): + self.__parent = parent + + def __getattr__(self, key): + ns = self.__parent + while ns: + if hasattr(ns.module, key): + return getattr(ns.module, key) + else: + ns = ns.inherits + raise AttributeError(key) + + +class Namespace: + + """Provides access to collections of rendering methods, which + can be local, from other templates, or from imported modules. + + To access a particular rendering method referenced by a + :class:`.Namespace`, use plain attribute access: + + .. sourcecode:: mako + + ${some_namespace.foo(x, y, z)} + + :class:`.Namespace` also contains several built-in attributes + described here. + + """ + + def __init__( + self, + name, + context, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + callables = () + + module = None + """The Python module referenced by this :class:`.Namespace`. + + If the namespace references a :class:`.Template`, then + this module is the equivalent of ``template.module``, + i.e. the generated module for the template. + + """ + + template = None + """The :class:`.Template` object referenced by this + :class:`.Namespace`, if any. + + """ + + context = None + """The :class:`.Context` object for this :class:`.Namespace`. + + Namespaces are often created with copies of contexts that + contain slightly different data, particularly in inheritance + scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one + can traverse an entire chain of templates that inherit from + one-another. + + """ + + filename = None + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + + If this is a pure module-based + :class:`.Namespace`, this evaluates to ``module.__file__``. If a + template-based namespace, it evaluates to the original + template file location. + + """ + + uri = None + """The URI for this :class:`.Namespace`'s template. + + I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. + + This is the equivalent of :attr:`.Template.uri`. 
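+
+    For example (illustrative values only), a namespace whose template
+    was loaded via ``lookup.get_template("/base.html")`` reports ``uri``
+    as ``"/base.html"``.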
+ + """ + + _templateuri = None + + @util.memoized_property + def attr(self): + """Access module level attributes by name. + + This accessor allows templates to supply "scalar" + attributes which are particularly handy in inheritance + relationships. + + .. seealso:: + + :ref:`inheritance_attr` + + :ref:`namespace_attr_for_includes` + + """ + return _NSAttr(self) + + def get_namespace(self, uri): + """Return a :class:`.Namespace` corresponding to the given ``uri``. + + If the given ``uri`` is a relative URI (i.e. it does not + contain a leading slash ``/``), the ``uri`` is adjusted to + be relative to the ``uri`` of the namespace itself. This + method is therefore mostly useful off of the built-in + ``local`` namespace, described in :ref:`namespace_local`. + + In + most cases, a template wouldn't need this function, and + should instead use the ``<%namespace>`` tag to load + namespaces. However, since all ``<%namespace>`` tags are + evaluated before the body of a template ever runs, + this method can be used to locate namespaces using + expressions that were generated within the body code of + the template, or to conditionally use a particular + namespace. + + """ + key = (self, uri) + if key in self.context.namespaces: + return self.context.namespaces[key] + ns = TemplateNamespace( + uri, + self.context._copy(), + templateuri=uri, + calling_uri=self._templateuri, + ) + self.context.namespaces[key] = ns + return ns + + def get_template(self, uri): + """Return a :class:`.Template` from the given ``uri``. + + The ``uri`` resolution is relative to the ``uri`` of this + :class:`.Namespace` object's :class:`.Template`. + + """ + return _lookup_template(self.context, uri, self._templateuri) + + def get_cached(self, key, **kwargs): + """Return a value from the :class:`.Cache` referenced by this + :class:`.Namespace` object's :class:`.Template`. + + The advantage to this method versus direct access to the + :class:`.Cache` is that the configuration parameters + declared in ``<%page>`` take effect here, thereby calling + up the same configured backend as that configured + by ``<%page>``. + + """ + + return self.cache.get(key, **kwargs) + + @property + def cache(self): + """Return the :class:`.Cache` object referenced + by this :class:`.Namespace` object's + :class:`.Template`. 
+ + """ + return self.template.cache + + def include_file(self, uri, **kwargs): + """Include a file at the given ``uri``.""" + + _include_file(self.context, uri, self._templateuri, **kwargs) + + def _populate(self, d, l): + for ident in l: + if ident == "*": + for k, v in self._get_star(): + d[k] = v + else: + d[ident] = getattr(self, ident) + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +class TemplateNamespace(Namespace): + + """A :class:`.Namespace` specific to a :class:`.Template` instance.""" + + def __init__( + self, + name, + context, + template=None, + templateuri=None, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + if templateuri is not None: + self.template = _lookup_template(context, templateuri, calling_uri) + self._templateuri = self.template.module._template_uri + elif template is not None: + self.template = template + self._templateuri = template.module._template_uri + else: + raise TypeError("'template' argument is required.") + + if populate_self: + lclcallable, lclcontext = _populate_self_namespace( + context, self.template, self_ns=self + ) + + @property + def module(self): + """The Python module referenced by this :class:`.Namespace`. + + If the namespace references a :class:`.Template`, then + this module is the equivalent of ``template.module``, + i.e. the generated module for the template. + + """ + return self.template.module + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.template.filename + + @property + def uri(self): + """The URI for this :class:`.Namespace`'s template. + + I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. + + This is the equivalent of :attr:`.Template.uri`. 
+ + """ + return self.template.uri + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def get(key): + callable_ = self.template._get_def_callable(key) + return functools.partial(callable_, self.context) + + for k in self.template.module._exports: + yield (k, get(k)) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.template.has_def(key): + callable_ = self.template._get_def_callable(key) + val = functools.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +class ModuleNamespace(Namespace): + + """A :class:`.Namespace` specific to a Python module instance.""" + + def __init__( + self, + name, + context, + module, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + mod = __import__(module) + for token in module.split(".")[1:]: + mod = getattr(mod, token) + self.module = mod + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.module.__file__ + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + for key in dir(self.module): + if key[0] != "_": + callable_ = getattr(self.module, key) + if callable(callable_): + yield key, functools.partial(callable_, self.context) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif hasattr(self.module, key): + callable_ = getattr(self.module, key) + val = functools.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +def supports_caller(func): + """Apply a caller_stack compatibility decorator to a plain + Python function. + + See the example in :ref:`namespaces_python_modules`. + + """ + + def wrap_stackframe(context, *args, **kwargs): + context.caller_stack._push_frame() + try: + return func(context, *args, **kwargs) + finally: + context.caller_stack._pop_frame() + + return wrap_stackframe + + +def capture(context, callable_, *args, **kwargs): + """Execute the given template def, capturing the output into + a buffer. + + See the example in :ref:`namespaces_python_modules`. + + """ + + if not callable(callable_): + raise exceptions.RuntimeException( + "capture() function expects a callable as " + "its argument (i.e. 
capture(func, *args, **kwargs))" + ) + context._push_buffer() + try: + callable_(*args, **kwargs) + finally: + buf = context._pop_buffer() + return buf.getvalue() + + +def _decorate_toplevel(fn): + def decorate_render(render_fn): + def go(context, *args, **kw): + def y(*args, **kw): + return render_fn(context, *args, **kw) + + try: + y.__name__ = render_fn.__name__[7:] + except TypeError: + # < Python 2.4 + pass + return fn(y)(context, *args, **kw) + + return go + + return decorate_render + + +def _decorate_inline(context, fn): + def decorate_render(render_fn): + dec = fn(render_fn) + + def go(*args, **kw): + return dec(context, *args, **kw) + + return go + + return decorate_render + + +def _include_file(context, uri, calling_uri, **kwargs): + """locate the template from the given uri and include it in + the current output.""" + + template = _lookup_template(context, uri, calling_uri) + (callable_, ctx) = _populate_self_namespace( + context._clean_inheritance_tokens(), template + ) + kwargs = _kwargs_for_include(callable_, context._data, **kwargs) + if template.include_error_handler: + try: + callable_(ctx, **kwargs) + except Exception: + result = template.include_error_handler(ctx, compat.exception_as()) + if not result: + raise + else: + callable_(ctx, **kwargs) + + +def _inherit_from(context, uri, calling_uri): + """called by the _inherit method in template modules to set + up the inheritance chain at the start of a template's + execution.""" + + if uri is None: + return None + template = _lookup_template(context, uri, calling_uri) + self_ns = context["self"] + ih = self_ns + while ih.inherits is not None: + ih = ih.inherits + lclcontext = context._locals({"next": ih}) + ih.inherits = TemplateNamespace( + "self:%s" % template.uri, + lclcontext, + template=template, + populate_self=False, + ) + context._data["parent"] = lclcontext._data["local"] = ih.inherits + callable_ = getattr(template.module, "_mako_inherit", None) + if callable_ is not None: + ret = callable_(template, lclcontext) + if ret: + return ret + + gen_ns = getattr(template.module, "_mako_generate_namespaces", None) + if gen_ns is not None: + gen_ns(context) + return (template.callable_, lclcontext) + + +def _lookup_template(context, uri, relativeto): + lookup = context._with_template.lookup + if lookup is None: + raise exceptions.TemplateLookupException( + "Template '%s' has no TemplateLookup associated" + % context._with_template.uri + ) + uri = lookup.adjust_uri(uri, relativeto) + try: + return lookup.get_template(uri) + except exceptions.TopLevelLookupException as e: + raise exceptions.TemplateLookupException( + str(compat.exception_as()) + ) from e + + +def _populate_self_namespace(context, template, self_ns=None): + if self_ns is None: + self_ns = TemplateNamespace( + "self:%s" % template.uri, + context, + template=template, + populate_self=False, + ) + context._data["self"] = context._data["local"] = self_ns + if hasattr(template.module, "_mako_inherit"): + ret = template.module._mako_inherit(template, context) + if ret: + return ret + return (template.callable_, context) + + +def _render(template, callable_, args, data, as_unicode=False): + """create a Context and return the string + output of the given template and template callable.""" + + if as_unicode: + buf = util.FastEncodingBuffer() + else: + buf = util.FastEncodingBuffer( + encoding=template.output_encoding, errors=template.encoding_errors + ) + context = Context(buf, **data) + context._outputting_as_unicode = as_unicode + 
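+    # _set_with_template() associates the template with the context and
+    # raises NameConflictError if any of the template's reserved names
+    # (e.g. "context", "loop") were passed in **data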
context._set_with_template(template) + + _render_context( + template, + callable_, + context, + *args, + **_kwargs_for_callable(callable_, data), + ) + return context._pop_buffer().getvalue() + + +def _kwargs_for_callable(callable_, data): + argspec = compat.inspect_getargspec(callable_) + # for normal pages, **pageargs is usually present + if argspec[2]: + return data + + # for rendering defs from the top level, figure out the args + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + kwargs = {} + for arg in namedargs: + if arg != "context" and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _kwargs_for_include(callable_, data, **kwargs): + argspec = compat.inspect_getargspec(callable_) + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + for arg in namedargs: + if arg != "context" and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _render_context(tmpl, callable_, context, *args, **kwargs): + import mako.template as template + + # create polymorphic 'self' namespace for this + # template with possibly updated context + if not isinstance(tmpl, template.DefTemplate): + # if main render method, call from the base of the inheritance stack + (inherit, lclcontext) = _populate_self_namespace(context, tmpl) + _exec_template(inherit, lclcontext, args=args, kwargs=kwargs) + else: + # otherwise, call the actual rendering method specified + (inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent) + _exec_template(callable_, context, args=args, kwargs=kwargs) + + +def _exec_template(callable_, context, args=None, kwargs=None): + """execute a rendering callable given the callable, a + Context, and optional explicit arguments + + the contextual Template will be located if it exists, and + the error handling options specified on that Template will + be interpreted here. 
+ """ + template = context._with_template + if template is not None and ( + template.format_exceptions or template.error_handler + ): + try: + callable_(context, *args, **kwargs) + except Exception: + _render_error(template, context, compat.exception_as()) + except: + e = sys.exc_info()[0] + _render_error(template, context, e) + else: + callable_(context, *args, **kwargs) + + +def _render_error(template, context, error): + if template.error_handler: + result = template.error_handler(context, error) + if not result: + tp, value, tb = sys.exc_info() + if value and tb: + raise value.with_traceback(tb) + else: + raise error + else: + error_template = exceptions.html_error_template() + if context._outputting_as_unicode: + context._buffer_stack[:] = [util.FastEncodingBuffer()] + else: + context._buffer_stack[:] = [ + util.FastEncodingBuffer( + error_template.output_encoding, + error_template.encoding_errors, + ) + ] + + context._set_with_template(error_template) + error_template.render_context(context, error=error) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/template.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/template.py new file mode 100644 index 0000000000000000000000000000000000000000..82c7cba8b450ffe9dd2cd91739db1c3e2596ca54 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/template.py @@ -0,0 +1,711 @@ +# mako/template.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provides the Template class, a facade for parsing, generating and executing +template strings, as well as template runtime operations.""" + +import json +import os +import re +import shutil +import stat +import tempfile +import types +import weakref + +from mako import cache +from mako import codegen +from mako import compat +from mako import exceptions +from mako import runtime +from mako import util +from mako.lexer import Lexer + + +class Template: + r"""Represents a compiled template. + + :class:`.Template` includes a reference to the original + template source (via the :attr:`.source` attribute) + as well as the source code of the + generated Python module (i.e. the :attr:`.code` attribute), + as well as a reference to an actual Python module. + + :class:`.Template` is constructed using either a literal string + representing the template text, or a filename representing a filesystem + path to a source file. + + :param text: textual template source. This argument is mutually + exclusive versus the ``filename`` parameter. + + :param filename: filename of the source template. This argument is + mutually exclusive versus the ``text`` parameter. + + :param buffer_filters: string list of filters to be applied + to the output of ``%def``\ s which are buffered, cached, or otherwise + filtered, after all filters + defined with the ``%def`` itself have been applied. Allows the + creation of default expression filters that let the output + of return-valued ``%def``\ s "opt out" of that filtering via + passing special attributes or objects. + + :param cache_args: Dictionary of cache configuration arguments that + will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`. + + :param cache_dir: + + .. deprecated:: 0.6 + Use the ``'dir'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. 
+
+    :param cache_enabled: Boolean flag which enables caching of this
+      template. See :ref:`caching_toplevel`.
+
+    :param cache_impl: String name of a :class:`.CacheImpl` caching
+      implementation to use. Defaults to ``'beaker'``.
+
+    :param cache_type:
+
+      .. deprecated:: 0.6
+          Use the ``'type'`` argument in the ``cache_args`` dictionary.
+          See :ref:`caching_toplevel`.
+
+    :param cache_url:
+
+      .. deprecated:: 0.6
+          Use the ``'url'`` argument in the ``cache_args`` dictionary.
+          See :ref:`caching_toplevel`.
+
+    :param default_filters: List of string filter names that will
+      be applied to all expressions. See :ref:`filtering_default_filters`.
+
+    :param enable_loop: When ``True``, enable the ``loop`` context variable.
+      This can be set to ``False`` to support templates that may
+      be making usage of the name "``loop``". Individual templates can
+      re-enable the "loop" context by placing the directive
+      ``enable_loop="True"`` inside the ``<%page>`` tag -- see
+      :ref:`migrating_loop`.
+
+    :param encoding_errors: Error parameter passed to ``encode()`` when
+      string encoding is performed. See :ref:`usage_unicode`.
+
+    :param error_handler: Python callable which is called whenever
+      compile or runtime exceptions occur. The callable is passed
+      the current context as well as the exception. If the
+      callable returns ``True``, the exception is considered to
+      be handled, else it is re-raised after the function
+      completes. It is used to provide custom error-rendering
+      functions.
+
+      .. seealso::
+
+        :paramref:`.Template.include_error_handler` - include-specific
+        error handler function
+
+    :param format_exceptions: if ``True``, exceptions which occur during
+      the render phase of this template will be caught and
+      formatted into an HTML error page, which then becomes the
+      rendered result of the :meth:`.render` call. Otherwise,
+      runtime exceptions are propagated outwards.
+
+    :param imports: String list of Python statements, typically individual
+      "import" lines, which will be placed into the module level
+      preamble of all generated Python modules. See the example
+      in :ref:`filtering_default_filters`.
+
+    :param future_imports: String list of names to import from `__future__`.
+      These will be concatenated into a comma-separated string and inserted
+      into the beginning of the template, e.g. ``future_imports=['FOO',
+      'BAR']`` results in ``from __future__ import FOO, BAR``.
+
+    :param include_error_handler: An error handler that runs when this template
+      is included within another one via the ``<%include>`` tag, and raises an
+      error. Compare to the :paramref:`.Template.error_handler` option.
+
+      .. versionadded:: 1.0.6
+
+      .. seealso::
+
+        :paramref:`.Template.error_handler` - top-level error handler function
+
+    :param input_encoding: Encoding of the template's source code. Can
+      be used in lieu of the coding comment. See
+      :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
+      details on source encoding.
+
+    :param lookup: a :class:`.TemplateLookup` instance that will be used
+      for all file lookups via the ``<%namespace>``,
+      ``<%include>``, and ``<%inherit>`` tags. See
+      :ref:`usage_templatelookup`.
+
+    :param module_directory: Filesystem location where generated
+      Python module files will be placed.
+
+    :param module_filename: Overrides the filename of the generated
+      Python module file. For advanced usage only.
+
+    :param module_writer: A callable which overrides how the Python
+      module is written entirely.
The callable is passed the + encoded source content of the module and the destination + path to be written to. The default behavior of module writing + uses a tempfile in conjunction with a file move in order + to make the operation atomic. So a user-defined module + writing function that mimics the default behavior would be: + + .. sourcecode:: python + + import tempfile + import os + import shutil + + def module_writer(source, outputpath): + (dest, name) = \\ + tempfile.mkstemp( + dir=os.path.dirname(outputpath) + ) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + from mako.template import Template + mytemplate = Template( + filename="index.html", + module_directory="/path/to/modules", + module_writer=module_writer + ) + + The function is provided for unusual configurations where + certain platform-specific permissions or other special + steps are needed. + + :param output_encoding: The encoding to use when :meth:`.render` + is called. + See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`. + + :param preprocessor: Python callable which will be passed + the full template source before it is parsed. The return + result of the callable will be used as the template source + code. + + :param lexer_cls: A :class:`.Lexer` class used to parse + the template. The :class:`.Lexer` class is used by + default. + + .. versionadded:: 0.7.4 + + :param strict_undefined: Replaces the automatic usage of + ``UNDEFINED`` for any undeclared variables not located in + the :class:`.Context` with an immediate raise of + ``NameError``. The advantage is immediate reporting of + missing variables which include the name. + + .. versionadded:: 0.3.6 + + :param uri: string URI or other identifier for this template. + If not provided, the ``uri`` is generated from the filesystem + path, or from the in-memory identity of a non-file-based + template. The primary usage of the ``uri`` is to provide a key + within :class:`.TemplateLookup`, as well as to generate the + file path of the generated Python module file, if + ``module_directory`` is specified. + + """ + + lexer_cls = Lexer + + def __init__( + self, + text=None, + filename=None, + uri=None, + format_exceptions=False, + error_handler=None, + lookup=None, + output_encoding=None, + encoding_errors="strict", + module_directory=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + module_filename=None, + input_encoding=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): + if uri: + self.module_id = re.sub(r"\W", "_", uri) + self.uri = uri + elif filename: + self.module_id = re.sub(r"\W", "_", filename) + drive, path = os.path.splitdrive(filename) + path = os.path.normpath(path).replace(os.path.sep, "/") + self.uri = path + else: + self.module_id = "memory:" + hex(id(self)) + self.uri = self.module_id + + u_norm = self.uri + if u_norm.startswith("/"): + u_norm = u_norm[1:] + u_norm = os.path.normpath(u_norm) + if u_norm.startswith(".."): + raise exceptions.TemplateLookupException( + 'Template uri "%s" is invalid - ' + "it cannot be relative outside " + "of the root path." 
% self.uri + ) + + self.input_encoding = input_encoding + self.output_encoding = output_encoding + self.encoding_errors = encoding_errors + self.enable_loop = enable_loop + self.strict_undefined = strict_undefined + self.module_writer = module_writer + + if default_filters is None: + self.default_filters = ["str"] + else: + self.default_filters = default_filters + self.buffer_filters = buffer_filters + + self.imports = imports + self.future_imports = future_imports + self.preprocessor = preprocessor + + if lexer_cls is not None: + self.lexer_cls = lexer_cls + + # if plain text, compile code in memory only + if text is not None: + (code, module) = _compile_text(self, text, filename) + self._code = code + self._source = text + ModuleInfo(module, None, self, filename, code, text, uri) + elif filename is not None: + # if template filename and a module directory, load + # a filesystem-based module file, generating if needed + if module_filename is not None: + path = module_filename + elif module_directory is not None: + path = os.path.abspath( + os.path.join( + os.path.normpath(module_directory), u_norm + ".py" + ) + ) + else: + path = None + module = self._compile_from_file(path, filename) + else: + raise exceptions.RuntimeException( + "Template requires text or filename" + ) + + self.module = module + self.filename = filename + self.callable_ = self.module.render_body + self.format_exceptions = format_exceptions + self.error_handler = error_handler + self.include_error_handler = include_error_handler + self.lookup = lookup + + self.module_directory = module_directory + + self._setup_cache_args( + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ) + + @util.memoized_property + def reserved_names(self): + if self.enable_loop: + return codegen.RESERVED_NAMES + else: + return codegen.RESERVED_NAMES.difference(["loop"]) + + def _setup_cache_args( + self, + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ): + self.cache_impl = cache_impl + self.cache_enabled = cache_enabled + self.cache_args = cache_args or {} + # transfer deprecated cache_* args + if cache_type: + self.cache_args["type"] = cache_type + if cache_dir: + self.cache_args["dir"] = cache_dir + if cache_url: + self.cache_args["url"] = cache_url + + def _compile_from_file(self, path, filename): + if path is not None: + util.verify_directory(os.path.dirname(path)) + filemtime = os.stat(filename)[stat.ST_MTIME] + if ( + not os.path.exists(path) + or os.stat(path)[stat.ST_MTIME] < filemtime + ): + data = util.read_file(filename) + _compile_module_file( + self, data, filename, path, self.module_writer + ) + module = compat.load_module(self.module_id, path) + if module._magic_number != codegen.MAGIC_NUMBER: + data = util.read_file(filename) + _compile_module_file( + self, data, filename, path, self.module_writer + ) + module = compat.load_module(self.module_id, path) + ModuleInfo(module, path, self, filename, None, None, None) + else: + # template filename and no module directory, compile code + # in memory + data = util.read_file(filename) + code, module = _compile_text(self, data, filename) + self._source = None + self._code = code + ModuleInfo(module, None, self, filename, code, None, None) + return module + + @property + def source(self): + """Return the template source code for this :class:`.Template`.""" + + return _get_module_info_from_callable(self.callable_).source + + @property + def code(self): + """Return the module source code for this :class:`.Template`.""" 
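+        # the ModuleInfo recorded at construction time retains the
+        # generated module source; it is located here via the render
+        # callable's __globals__ (see _get_module_info_from_callable)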
+ + return _get_module_info_from_callable(self.callable_).code + + @util.memoized_property + def cache(self): + return cache.Cache(self) + + @property + def cache_dir(self): + return self.cache_args["dir"] + + @property + def cache_url(self): + return self.cache_args["url"] + + @property + def cache_type(self): + return self.cache_args["type"] + + def render(self, *args, **data): + """Render the output of this template as a string. + + If the template specifies an output encoding, the string + will be encoded accordingly, else the output is raw (raw + output uses `StringIO` and can't handle multibyte + characters). A :class:`.Context` object is created corresponding + to the given data. Arguments that are explicitly declared + by this template's internal rendering method are also + pulled from the given ``*args``, ``**data`` members. + + """ + return runtime._render(self, self.callable_, args, data) + + def render_unicode(self, *args, **data): + """Render the output of this template as a unicode object.""" + + return runtime._render( + self, self.callable_, args, data, as_unicode=True + ) + + def render_context(self, context, *args, **kwargs): + """Render this :class:`.Template` with the given context. + + The data is written to the context's buffer. + + """ + if getattr(context, "_with_template", None) is None: + context._set_with_template(self) + runtime._render_context(self, self.callable_, context, *args, **kwargs) + + def has_def(self, name): + return hasattr(self.module, "render_%s" % name) + + def get_def(self, name): + """Return a def of this template as a :class:`.DefTemplate`.""" + + return DefTemplate(self, getattr(self.module, "render_%s" % name)) + + def list_defs(self): + """return a list of defs in the template. + + .. versionadded:: 1.0.4 + + """ + return [i[7:] for i in dir(self.module) if i[:7] == "render_"] + + def _get_def_callable(self, name): + return getattr(self.module, "render_%s" % name) + + @property + def last_modified(self): + return self.module._modified_time + + +class ModuleTemplate(Template): + + """A Template which is constructed given an existing Python module. 
+
+    e.g.::
+
+        t = Template("this is a template")
+        f = open("mymodule.py", "w")
+        f.write(t.code)
+        f.close()
+
+        import mymodule
+
+        t = ModuleTemplate(mymodule)
+        print(t.render())
+
+    """
+
+    def __init__(
+        self,
+        module,
+        module_filename=None,
+        template=None,
+        template_filename=None,
+        module_source=None,
+        template_source=None,
+        output_encoding=None,
+        encoding_errors="strict",
+        format_exceptions=False,
+        error_handler=None,
+        lookup=None,
+        cache_args=None,
+        cache_impl="beaker",
+        cache_enabled=True,
+        cache_type=None,
+        cache_dir=None,
+        cache_url=None,
+        include_error_handler=None,
+    ):
+        self.module_id = re.sub(r"\W", "_", module._template_uri)
+        self.uri = module._template_uri
+        self.input_encoding = module._source_encoding
+        self.output_encoding = output_encoding
+        self.encoding_errors = encoding_errors
+        self.enable_loop = module._enable_loop
+
+        self.module = module
+        self.filename = template_filename
+        ModuleInfo(
+            module,
+            module_filename,
+            self,
+            template_filename,
+            module_source,
+            template_source,
+            module._template_uri,
+        )
+
+        self.callable_ = self.module.render_body
+        self.format_exceptions = format_exceptions
+        self.error_handler = error_handler
+        self.include_error_handler = include_error_handler
+        self.lookup = lookup
+        self._setup_cache_args(
+            cache_impl,
+            cache_enabled,
+            cache_args,
+            cache_type,
+            cache_dir,
+            cache_url,
+        )
+
+
+class DefTemplate(Template):
+
+    """A :class:`.Template` which represents a callable def in a parent
+    template."""
+
+    def __init__(self, parent, callable_):
+        self.parent = parent
+        self.callable_ = callable_
+        self.output_encoding = parent.output_encoding
+        self.module = parent.module
+        self.encoding_errors = parent.encoding_errors
+        self.format_exceptions = parent.format_exceptions
+        self.error_handler = parent.error_handler
+        self.include_error_handler = parent.include_error_handler
+        self.enable_loop = parent.enable_loop
+        self.lookup = parent.lookup
+
+    def get_def(self, name):
+        return self.parent.get_def(name)
+
+
+class ModuleInfo:
+
+    """Stores information about a module currently loaded into
+    memory, and provides reverse lookups of template source and module
+    source code based on a module's identifier.
+ + """ + + _modules = weakref.WeakValueDictionary() + + def __init__( + self, + module, + module_filename, + template, + template_filename, + module_source, + template_source, + template_uri, + ): + self.module = module + self.module_filename = module_filename + self.template_filename = template_filename + self.module_source = module_source + self.template_source = template_source + self.template_uri = template_uri + self._modules[module.__name__] = template._mmarker = self + if module_filename: + self._modules[module_filename] = self + + @classmethod + def get_module_source_metadata(cls, module_source, full_line_map=False): + source_map = re.search( + r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S + ).group(1) + source_map = json.loads(source_map) + source_map["line_map"] = { + int(k): int(v) for k, v in source_map["line_map"].items() + } + if full_line_map: + f_line_map = source_map["full_line_map"] = [] + line_map = source_map["line_map"] + + curr_templ_line = 1 + for mod_line in range(1, max(line_map)): + if mod_line in line_map: + curr_templ_line = line_map[mod_line] + f_line_map.append(curr_templ_line) + return source_map + + @property + def code(self): + if self.module_source is not None: + return self.module_source + else: + return util.read_python_file(self.module_filename) + + @property + def source(self): + if self.template_source is None: + data = util.read_file(self.template_filename) + if self.module._source_encoding: + return data.decode(self.module._source_encoding) + else: + return data + + elif self.module._source_encoding and not isinstance( + self.template_source, str + ): + return self.template_source.decode(self.module._source_encoding) + else: + return self.template_source + + +def _compile(template, text, filename, generate_magic_comment): + lexer = template.lexer_cls( + text, + filename, + input_encoding=template.input_encoding, + preprocessor=template.preprocessor, + ) + node = lexer.parse() + source = codegen.compile( + node, + template.uri, + filename, + default_filters=template.default_filters, + buffer_filters=template.buffer_filters, + imports=template.imports, + future_imports=template.future_imports, + source_encoding=lexer.encoding, + generate_magic_comment=generate_magic_comment, + strict_undefined=template.strict_undefined, + enable_loop=template.enable_loop, + reserved_names=template.reserved_names, + ) + return source, lexer + + +def _compile_text(template, text, filename): + identifier = template.module_id + source, lexer = _compile( + template, text, filename, generate_magic_comment=False + ) + + cid = identifier + module = types.ModuleType(cid) + code = compile(source, cid, "exec") + + # this exec() works for 2.4->3.3. + exec(code, module.__dict__, module.__dict__) + return (source, module) + + +def _compile_module_file(template, text, filename, outputpath, module_writer): + source, lexer = _compile( + template, text, filename, generate_magic_comment=True + ) + + if isinstance(source, str): + source = source.encode(lexer.encoding or "ascii") + + if module_writer: + module_writer(source, outputpath) + else: + # make tempfiles in the same location as the ultimate + # location. this ensures they're on the same filesystem, + # avoiding synchronization issues. 
+ (dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath)) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + +def _get_module_info_from_callable(callable_): + return _get_module_info(callable_.__globals__["__name__"]) + + +def _get_module_info(filename): + return ModuleInfo._modules[filename] diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/_config.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/_config.py new file mode 100644 index 0000000000000000000000000000000000000000..4ee3d0a6ed770aba19adb757ffd552d504c5b9e2 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/_config.py @@ -0,0 +1,128 @@ +import configparser +import dataclasses +from dataclasses import dataclass +from pathlib import Path +from typing import Callable +from typing import ClassVar +from typing import Optional +from typing import Union + +from .helpers import make_path + + +class ConfigError(BaseException): + pass + + +class MissingConfig(ConfigError): + pass + + +class MissingConfigSection(ConfigError): + pass + + +class MissingConfigItem(ConfigError): + pass + + +class ConfigValueTypeError(ConfigError): + pass + + +class _GetterDispatch: + def __init__(self, initialdata, default_getter: Callable): + self.default_getter = default_getter + self.data = initialdata + + def get_fn_for_type(self, type_): + return self.data.get(type_, self.default_getter) + + def get_typed_value(self, type_, name): + get_fn = self.get_fn_for_type(type_) + return get_fn(name) + + +def _parse_cfg_file(filespec: Union[Path, str]): + cfg = configparser.ConfigParser() + try: + filepath = make_path(filespec, check_exists=True) + except FileNotFoundError as e: + raise MissingConfig(f"No config file found at {filespec}") from e + else: + with open(filepath, encoding="utf-8") as f: + cfg.read_file(f) + return cfg + + +def _build_getter(cfg_obj, cfg_section, method, converter=None): + def caller(option, **kwargs): + try: + rv = getattr(cfg_obj, method)(cfg_section, option, **kwargs) + except configparser.NoSectionError as nse: + raise MissingConfigSection( + f"No config section named {cfg_section}" + ) from nse + except configparser.NoOptionError as noe: + raise MissingConfigItem(f"No config item for {option}") from noe + except ValueError as ve: + # ConfigParser.getboolean, .getint, .getfloat raise ValueError + # on bad types + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from ve + else: + if converter: + try: + rv = converter(rv) + except Exception as e: + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from e + return rv + + return caller + + +def _build_getter_dispatch(cfg_obj, cfg_section, converters=None): + converters = converters or {} + + default_getter = _build_getter(cfg_obj, cfg_section, "get") + + # support ConfigParser builtins + getters = { + int: _build_getter(cfg_obj, cfg_section, "getint"), + bool: _build_getter(cfg_obj, cfg_section, "getboolean"), + float: _build_getter(cfg_obj, cfg_section, "getfloat"), + str: default_getter, + } + + # use ConfigParser.get and convert 
value
+    getters.update(
+        {
+            type_: _build_getter(
+                cfg_obj, cfg_section, "get", converter=converter_fn
+            )
+            for type_, converter_fn in converters.items()
+        }
+    )
+
+    return _GetterDispatch(getters, default_getter)
+
+
+@dataclass
+class ReadsCfg:
+    section_header: ClassVar[str]
+    converters: ClassVar[Optional[dict]] = None
+
+    @classmethod
+    def from_cfg_file(cls, filespec: Union[Path, str]):
+        cfg = _parse_cfg_file(filespec)
+        dispatch = _build_getter_dispatch(
+            cfg, cls.section_header, converters=cls.converters
+        )
+        kwargs = {
+            field.name: dispatch.get_typed_value(field.type, field.name)
+            for field in dataclasses.fields(cls)
+        }
+        return cls(**kwargs)
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/assertions.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/assertions.py
new file mode 100644
index 0000000000000000000000000000000000000000..22221cd20c4a65054ad0f6fec17640255671a81d
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/assertions.py
@@ -0,0 +1,166 @@
+import contextlib
+import re
+import sys
+
+
+def eq_(a, b, msg=None):
+    """Assert a == b, with repr messaging on failure."""
+    assert a == b, msg or "%r != %r" % (a, b)
+
+
+def ne_(a, b, msg=None):
+    """Assert a != b, with repr messaging on failure."""
+    assert a != b, msg or "%r == %r" % (a, b)
+
+
+def in_(a, b, msg=None):
+    """Assert a in b, with repr messaging on failure."""
+    assert a in b, msg or "%r not in %r" % (a, b)
+
+
+def not_in(a, b, msg=None):
+    """Assert a not in b, with repr messaging on failure."""
+    assert a not in b, msg or "%r is in %r" % (a, b)
+
+
+def _assert_proper_exception_context(exception):
+    """assert that any exception we're catching does not have a __context__
+    without a __cause__, and that __suppress_context__ is never set.
+
+    Python 3 will report nested exceptions as "during the handling of
+    error X, error Y occurred". That's not what we want to do. We want
+    these exceptions in a cause chain.
+
+    """
+
+    if (
+        exception.__context__ is not exception.__cause__
+        and not exception.__suppress_context__
+    ):
+        assert False, (
+            "Exception %r was correctly raised but did not set a cause, "
+            "within context %r as its cause."
+            % (exception, exception.__context__)
+        )
+
+
+def _assert_proper_cause_cls(exception, cause_cls):
+    """assert that the exception we're catching was raised with a
+    __cause__ of the expected class.
+
+    Python 3 will report nested exceptions as "during the handling of
+    error X, error Y occurred". That's not what we want to do. We want
+    these exceptions in a cause chain.
+
+    """
+    assert isinstance(exception.__cause__, cause_cls), (
+        "Exception %r was correctly raised but has cause %r, which does not "
+        "have the expected cause type %r."
+ % (exception, exception.__cause__, cause_cls) + ) + + +def assert_raises(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw) + + +def assert_raises_with_proper_context(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw, check_context=True) + + +def assert_raises_with_given_cause( + except_cls, cause_cls, callable_, *args, **kw +): + return _assert_raises(except_cls, callable_, args, kw, cause_cls=cause_cls) + + +def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): + return _assert_raises(except_cls, callable_, args, kwargs, msg=msg) + + +def assert_raises_message_with_proper_context( + except_cls, msg, callable_, *args, **kwargs +): + return _assert_raises( + except_cls, callable_, args, kwargs, msg=msg, check_context=True + ) + + +def assert_raises_message_with_given_cause( + except_cls, msg, cause_cls, callable_, *args, **kwargs +): + return _assert_raises( + except_cls, callable_, args, kwargs, msg=msg, cause_cls=cause_cls + ) + + +def _assert_raises( + except_cls, + callable_, + args, + kwargs, + msg=None, + check_context=False, + cause_cls=None, +): + with _expect_raises(except_cls, msg, check_context, cause_cls) as ec: + callable_(*args, **kwargs) + return ec.error + + +class _ErrorContainer: + error = None + + +@contextlib.contextmanager +def _expect_raises(except_cls, msg=None, check_context=False, cause_cls=None): + ec = _ErrorContainer() + if check_context: + are_we_already_in_a_traceback = sys.exc_info()[0] + try: + yield ec + success = False + except except_cls as err: + ec.error = err + success = True + if msg is not None: + # I'm often pdbing here, and "err" above isn't + # in scope, so assign the string explicitly + error_as_string = str(err) + assert re.search(msg, error_as_string, re.UNICODE), "%r !~ %s" % ( + msg, + error_as_string, + ) + if cause_cls is not None: + _assert_proper_cause_cls(err, cause_cls) + if check_context and not are_we_already_in_a_traceback: + _assert_proper_exception_context(err) + print(str(err).encode("utf-8")) + + # it's generally a good idea to not carry traceback objects outside + # of the except: block, but in this case especially we seem to have + # hit some bug in either python 3.10.0b2 or greenlet or both which + # this seems to fix: + # https://github.com/python-greenlet/greenlet/issues/242 + del ec + + # assert outside the block so it works for AssertionError too ! 
+ assert success, "Callable did not raise an exception" + + +def expect_raises(except_cls, check_context=False): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message(except_cls, msg, check_context=False): + return _expect_raises(except_cls, msg=msg, check_context=check_context) + + +def expect_raises_with_proper_context(except_cls, check_context=True): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message_with_proper_context( + except_cls, msg, check_context=True +): + return _expect_raises(except_cls, msg=msg, check_context=check_context) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/config.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/config.py new file mode 100644 index 0000000000000000000000000000000000000000..b77d0c08084b0a9b1b5e7395e1bd54895ba689e6 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/config.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from pathlib import Path + +from ._config import ReadsCfg +from .helpers import make_path + + +@dataclass +class Config(ReadsCfg): + module_base: Path + template_base: Path + + section_header = "mako_testing" + converters = {Path: make_path} + + +config = Config.from_cfg_file("./setup.cfg") diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/exclusions.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/exclusions.py new file mode 100644 index 0000000000000000000000000000000000000000..37b2d14a395f5c27ed608955860471d597e2ae3d --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/exclusions.py @@ -0,0 +1,80 @@ +import pytest + +from mako.ext.beaker_cache import has_beaker +from mako.util import update_wrapper + + +try: + import babel.messages.extract as babel +except ImportError: + babel = None + + +try: + import lingua +except ImportError: + lingua = None + + +try: + import dogpile.cache # noqa +except ImportError: + has_dogpile_cache = False +else: + has_dogpile_cache = True + + +requires_beaker = pytest.mark.skipif( + not has_beaker, reason="Beaker is required for these tests." 
+) + + +requires_babel = pytest.mark.skipif( + babel is None, reason="babel not installed: skipping babelplugin test" +) + + +requires_lingua = pytest.mark.skipif( + lingua is None, reason="lingua not installed: skipping linguaplugin test" +) + + +requires_dogpile_cache = pytest.mark.skipif( + not has_dogpile_cache, + reason="dogpile.cache is required to run these tests", +) + + +def _pygments_version(): + try: + import pygments + + version = pygments.__version__ + except: + version = "0" + return version + + +requires_pygments_14 = pytest.mark.skipif( + _pygments_version() < "1.4", reason="Requires pygments 1.4 or greater" +) + + +# def requires_pygments_14(fn): + +# return skip_if( +# lambda: version < "1.4", "Requires pygments 1.4 or greater" +# )(fn) + + +def requires_no_pygments_exceptions(fn): + def go(*arg, **kw): + from mako import exceptions + + exceptions._install_fallback() + try: + return fn(*arg, **kw) + finally: + exceptions._install_highlighting() + + return update_wrapper(go, fn) diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/fixtures.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/fixtures.py new file mode 100644 index 0000000000000000000000000000000000000000..01e996171d16be446ee849ceafafaf2cf699f256 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/fixtures.py @@ -0,0 +1,119 @@ +import os + +from mako.cache import CacheImpl +from mako.cache import register_plugin +from mako.template import Template +from .assertions import eq_ +from .config import config + + +class TemplateTest: + def _file_template(self, filename, **kw): + filepath = self._file_path(filename) + return Template( + uri=filename, + filename=filepath, + module_directory=config.module_base, + **kw, + ) + + def _file_path(self, filename): + name, ext = os.path.splitext(filename) + py3k_path = os.path.join(config.template_base, name + "_py3k" + ext) + if os.path.exists(py3k_path): + return py3k_path + + return os.path.join(config.template_base, filename) + + def _do_file_test( + self, + filename, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = self._file_template(filename, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_memory_test( + self, + source, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = Template(text=source, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_test( + self, + template, + expected, + filters=None, + template_args=None, + unicode_=True, + ): + if template_args is None: + template_args = {} + if unicode_: + output = template.render_unicode(**template_args) + else: + output = template.render(**template_args) + + if filters: + output = filters(output) + eq_(output, expected) + + def indicates_unbound_local_error(self, rendered_output, unbound_var): + var = f"'{unbound_var}'" + error_msgs = ( + # < 3.11 + f"local variable {var} referenced before assignment", + # >= 3.11 + f"cannot access local variable {var} where it is not associated", + ) + return any((msg in rendered_output) for msg in error_msgs) + + +class PlainCacheImpl(CacheImpl): + """Simple memory cache impl so that tests which + use caching can run without beaker.""" + + def __init__(self, cache): + self.cache = cache + self.data = {} + + 
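# minimal cache contract for the methods below: get_or_create computes + # the value once via creation_function and serves the stored result on + # later hits; kw is forwarded to the creation function only on a miss. +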
def get_or_create(self, key, creation_function, **kw): + if key in self.data: + return self.data[key] + else: + self.data[key] = data = creation_function(**kw) + return data + + def put(self, key, value, **kw): + self.data[key] = value + + def get(self, key, **kw): + return self.data[key] + + def invalidate(self, key, **kw): + del self.data[key] + + +register_plugin("plain", __name__, "PlainCacheImpl") diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/helpers.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..5ae9d38d3d590d51cfe4486dab31589207a287d3 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/testing/helpers.py @@ -0,0 +1,71 @@ +import contextlib +import pathlib +from pathlib import Path +import re +import time +from typing import Union +from unittest import mock + + +def flatten_result(result): + return re.sub(r"[\s\r\n]+", " ", result).strip() + + +def result_lines(result): + return [ + x.strip() + for x in re.split(r"\r?\n", re.sub(r" +", " ", result)) + if x.strip() != "" + ] + + +def result_raw_lines(result): + return [x for x in re.split(r"\r?\n", result) if x.strip() != ""] + + +def make_path( + filespec: Union[Path, str], + make_absolute: bool = True, + check_exists: bool = False, +) -> Path: + path = Path(filespec) + if make_absolute: + path = path.resolve(strict=check_exists) + if check_exists and (not path.exists()): + raise FileNotFoundError(f"No file or directory at {filespec}") + return path + + +def _unlink_path(path, missing_ok=False): + # Replicate 3.8+ functionality in 3.7 + cm = contextlib.nullcontext() + if missing_ok: + cm = contextlib.suppress(FileNotFoundError) + + with cm: + path.unlink() + + +def replace_file_with_dir(pathspec): + path = pathlib.Path(pathspec) + _unlink_path(path, missing_ok=True) + path.mkdir(exist_ok=True) + return path + + +def file_with_template_code(filespec): + with open(filespec, "w") as f: + f.write( + """ +i am an artificial template just for you +""" + ) + return filespec + + +@contextlib.contextmanager +def rewind_compile_time(hours=1): + rewound = time.time() - (hours * 3_600) + with mock.patch("mako.codegen.time") as codegen_time: + codegen_time.time.return_value = rewound + yield diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/util.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/util.py new file mode 100644 index 0000000000000000000000000000000000000000..470713a5172f268a7b7bfcd7b5a9f90000cfb7f8 --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/mako/util.py @@ -0,0 +1,388 @@ +# mako/util.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from ast import parse +import codecs +import collections +import operator +import os +import re +import timeit + +from .compat import importlib_metadata_get + + +def update_wrapper(decorated, fn): + decorated.__wrapped__ = fn + decorated.__name__ = fn.__name__ + return decorated + + +class PluginLoader: + def __init__(self, group): + self.group = group + self.impls = {} + + def load(self, name): + if name in self.impls: + return self.impls[name]() + + for impl in importlib_metadata_get(self.group): + 
if impl.name == name: + self.impls[name] = impl.load + return impl.load() + + from mako import exceptions + + raise exceptions.RuntimeException( + "Can't load plugin %s %s" % (self.group, name) + ) + + def register(self, name, modulepath, objname): + def load(): + mod = __import__(modulepath) + for token in modulepath.split(".")[1:]: + mod = getattr(mod, token) + return getattr(mod, objname) + + self.impls[name] = load + + +def verify_directory(dir_): + """create and/or verify a filesystem directory.""" + + tries = 0 + + while not os.path.exists(dir_): + try: + tries += 1 + os.makedirs(dir_, 0o755) + except: + if tries > 5: + raise + + +def to_list(x, default=None): + if x is None: + return default + if not isinstance(x, (list, tuple)): + return [x] + else: + return x + + +class memoized_property: + + """A read-only @property that is only evaluated once.""" + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + obj.__dict__[self.__name__] = result = self.fget(obj) + return result + + +class memoized_instancemethod: + + """Decorate a method to memoize its return value. + + Best applied to no-arg methods: memoization is not sensitive to + argument values, and will always return the same value even when + called with different arguments. + + """ + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + + def oneshot(*args, **kw): + result = self.fget(obj, *args, **kw) + + def memo(*a, **kw): + return result + + memo.__name__ = self.__name__ + memo.__doc__ = self.__doc__ + obj.__dict__[self.__name__] = memo + return result + + oneshot.__name__ = self.__name__ + oneshot.__doc__ = self.__doc__ + return oneshot + + +class SetLikeDict(dict): + + """a dictionary that has some setlike methods on it""" + + def union(self, other): + """produce a 'union' of this dict and another (at the key level). + + values in the second dict take precedence over that of the first""" + x = SetLikeDict(**self) + x.update(other) + return x + + +class FastEncodingBuffer: + + """a very rudimentary buffer that is faster than StringIO, + and supports unicode data.""" + + def __init__(self, encoding=None, errors="strict"): + self.data = collections.deque() + self.encoding = encoding + self.delim = "" + self.errors = errors + self.write = self.data.append + + def truncate(self): + self.data = collections.deque() + self.write = self.data.append + + def getvalue(self): + if self.encoding: + return self.delim.join(self.data).encode( + self.encoding, self.errors + ) + else: + return self.delim.join(self.data) + + +class LRUCache(dict): + + """A dictionary-like object that stores a limited number of items, + discarding lesser used items periodically. + + this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based + paradigm so that synchronization is not really needed. the size management + is inexact.
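+ each access re-stamps an item via timeit.default_timer(); once the dict + grows past capacity * (1 + threshold), _manage_size evicts the least + recently stamped items so that roughly capacity newest entries remain.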
+ """ + + class _Item: + def __init__(self, key, value): + self.key = key + self.value = value + self.timestamp = timeit.default_timer() + + def __repr__(self): + return repr(self.value) + + def __init__(self, capacity, threshold=0.5): + self.capacity = capacity + self.threshold = threshold + + def __getitem__(self, key): + item = dict.__getitem__(self, key) + item.timestamp = timeit.default_timer() + return item.value + + def values(self): + return [i.value for i in dict.values(self)] + + def setdefault(self, key, value): + if key in self: + return self[key] + self[key] = value + return value + + def __setitem__(self, key, value): + item = dict.get(self, key) + if item is None: + item = self._Item(key, value) + dict.__setitem__(self, key, item) + else: + item.value = value + self._manage_size() + + def _manage_size(self): + while len(self) > self.capacity + self.capacity * self.threshold: + bytime = sorted( + dict.values(self), + key=operator.attrgetter("timestamp"), + reverse=True, + ) + for item in bytime[self.capacity :]: + try: + del self[item.key] + except KeyError: + # if we couldn't find a key, most likely some other thread + # broke in on us. loop around and try again + break + + +# Regexp to match python magic encoding line +_PYTHON_MAGIC_COMMENT_re = re.compile( + r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE +) + + +def parse_encoding(fp): + """Deduce the encoding of a Python source file (binary mode) from magic + comment. + + It does this in the same way as the `Python interpreter`__ + + .. __: http://docs.python.org/ref/encodings.html + + The ``fp`` argument should be a seekable file object in binary mode. + """ + pos = fp.tell() + fp.seek(0) + try: + line1 = fp.readline() + has_bom = line1.startswith(codecs.BOM_UTF8) + if has_bom: + line1 = line1[len(codecs.BOM_UTF8) :] + + m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore")) + if not m: + try: + parse(line1.decode("ascii", "ignore")) + except (ImportError, SyntaxError): + # Either it's a real syntax error, in which case the source + # is not valid python source, or line2 is a continuation of + # line1, in which case we don't want to scan line2 for a magic + # comment. + pass + else: + line2 = fp.readline() + m = _PYTHON_MAGIC_COMMENT_re.match( + line2.decode("ascii", "ignore") + ) + + if has_bom: + if m: + raise SyntaxError( + "python refuses to compile code with both a UTF8" + " byte-order-mark and a magic encoding comment" + ) + return "utf_8" + elif m: + return m.group(1) + else: + return None + finally: + fp.seek(pos) + + +def sorted_dict_repr(d): + """repr() a dictionary with the keys in order. + + Used by the lexer unit test to compare parse trees based on strings. 
+ + """ + keys = list(d.keys()) + keys.sort() + return "{" + ", ".join("%r: %r" % (k, d[k]) for k in keys) + "}" + + +def restore__ast(_ast): + """Attempt to restore the required classes to the _ast module if it + appears to be missing them + """ + if hasattr(_ast, "AST"): + return + _ast.PyCF_ONLY_AST = 2 << 9 + m = compile( + """\ +def foo(): pass +class Bar: pass +if False: pass +baz = 'mako' +1 + 2 - 3 * 4 / 5 +6 // 7 % 8 << 9 >> 10 +11 & 12 ^ 13 | 14 +15 and 16 or 17 +-baz + (not +18) - ~17 +baz and 'foo' or 'bar' +(mako is baz == baz) is not baz != mako +mako > baz < mako >= baz <= mako +mako in baz not in mako""", + "", + "exec", + _ast.PyCF_ONLY_AST, + ) + _ast.Module = type(m) + + for cls in _ast.Module.__mro__: + if cls.__name__ == "mod": + _ast.mod = cls + elif cls.__name__ == "AST": + _ast.AST = cls + + _ast.FunctionDef = type(m.body[0]) + _ast.ClassDef = type(m.body[1]) + _ast.If = type(m.body[2]) + + _ast.Name = type(m.body[3].targets[0]) + _ast.Store = type(m.body[3].targets[0].ctx) + _ast.Str = type(m.body[3].value) + + _ast.Sub = type(m.body[4].value.op) + _ast.Add = type(m.body[4].value.left.op) + _ast.Div = type(m.body[4].value.right.op) + _ast.Mult = type(m.body[4].value.right.left.op) + + _ast.RShift = type(m.body[5].value.op) + _ast.LShift = type(m.body[5].value.left.op) + _ast.Mod = type(m.body[5].value.left.left.op) + _ast.FloorDiv = type(m.body[5].value.left.left.left.op) + + _ast.BitOr = type(m.body[6].value.op) + _ast.BitXor = type(m.body[6].value.left.op) + _ast.BitAnd = type(m.body[6].value.left.left.op) + + _ast.Or = type(m.body[7].value.op) + _ast.And = type(m.body[7].value.values[0].op) + + _ast.Invert = type(m.body[8].value.right.op) + _ast.Not = type(m.body[8].value.left.right.op) + _ast.UAdd = type(m.body[8].value.left.right.operand.op) + _ast.USub = type(m.body[8].value.left.left.op) + + _ast.Or = type(m.body[9].value.op) + _ast.And = type(m.body[9].value.values[0].op) + + _ast.IsNot = type(m.body[10].value.ops[0]) + _ast.NotEq = type(m.body[10].value.ops[1]) + _ast.Is = type(m.body[10].value.left.ops[0]) + _ast.Eq = type(m.body[10].value.left.ops[1]) + + _ast.Gt = type(m.body[11].value.ops[0]) + _ast.Lt = type(m.body[11].value.ops[1]) + _ast.GtE = type(m.body[11].value.ops[2]) + _ast.LtE = type(m.body[11].value.ops[3]) + + _ast.In = type(m.body[12].value.ops[0]) + _ast.NotIn = type(m.body[12].value.ops[1]) + + +def read_file(path, mode="rb"): + with open(path, mode) as fp: + return fp.read() + + +def read_python_file(path): + fp = open(path, "rb") + try: + encoding = parse_encoding(fp) + data = fp.read() + if encoding: + data = data.decode(encoding) + return data + finally: + fp.close() diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/INSTALLER b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/LICENSE b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/LICENSE new file mode 100644 index 
0000000000000000000000000000000000000000..ec51537db27dd4d9c9ed3cd39fd96485f3cfddea --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/LICENSE @@ -0,0 +1,99 @@ +License agreement for matplotlib versions 1.3.0 and later +========================================================= + +1. This LICENSE AGREEMENT is between the Matplotlib Development Team +("MDT"), and the Individual or Organization ("Licensee") accessing and +otherwise using matplotlib software in source or binary form and its +associated documentation. + +2. Subject to the terms and conditions of this License Agreement, MDT +hereby grants Licensee a nonexclusive, royalty-free, world-wide license +to reproduce, analyze, test, perform and/or display publicly, prepare +derivative works, distribute, and otherwise use matplotlib +alone or in any derivative version, provided, however, that MDT's +License Agreement and MDT's notice of copyright, i.e., "Copyright (c) +2012- Matplotlib Development Team; All Rights Reserved" are retained in +matplotlib alone or in any derivative version prepared by +Licensee. + +3. In the event Licensee prepares a derivative work that is based on or +incorporates matplotlib or any part thereof, and wants to +make the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to matplotlib . + +4. MDT is making matplotlib available to Licensee on an "AS +IS" basis. MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB +WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + +5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR +LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING +MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF +THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between MDT and +Licensee. This License Agreement does not grant permission to use MDT +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using matplotlib , +Licensee agrees to be bound by the terms and conditions of this License +Agreement. + +License agreement for matplotlib versions prior to 1.3.0 +======================================================== + +1. This LICENSE AGREEMENT is between John D. Hunter ("JDH"), and the +Individual or Organization ("Licensee") accessing and otherwise using +matplotlib software in source or binary form and its associated +documentation. + +2. Subject to the terms and conditions of this License Agreement, JDH +hereby grants Licensee a nonexclusive, royalty-free, world-wide license +to reproduce, analyze, test, perform and/or display publicly, prepare +derivative works, distribute, and otherwise use matplotlib +alone or in any derivative version, provided, however, that JDH's +License Agreement and JDH's notice of copyright, i.e., "Copyright (c) +2002-2011 John D. 
Hunter; All Rights Reserved" are retained in +matplotlib alone or in any derivative version prepared by +Licensee. + +3. In the event Licensee prepares a derivative work that is based on or +incorporates matplotlib or any part thereof, and wants to +make the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to matplotlib. + +4. JDH is making matplotlib available to Licensee on an "AS +IS" basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB +WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + +5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR +LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING +MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF +THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between JDH and +Licensee. This License Agreement does not grant permission to use JDH +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using matplotlib, +Licensee agrees to be bound by the terms and conditions of this License +Agreement. \ No newline at end of file diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/METADATA b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..49dfd37a2c6a647cd3d2bd68b8fc56edbe4764af --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/METADATA @@ -0,0 +1,215 @@ +Metadata-Version: 2.1 +Name: matplotlib +Version: 3.10.3 +Summary: Python plotting package +Author: John D. Hunter, Michael Droettboom +Author-Email: Unknown +License: License agreement for matplotlib versions 1.3.0 and later + ========================================================= + + 1. This LICENSE AGREEMENT is between the Matplotlib Development Team + ("MDT"), and the Individual or Organization ("Licensee") accessing and + otherwise using matplotlib software in source or binary form and its + associated documentation. + + 2. Subject to the terms and conditions of this License Agreement, MDT + hereby grants Licensee a nonexclusive, royalty-free, world-wide license + to reproduce, analyze, test, perform and/or display publicly, prepare + derivative works, distribute, and otherwise use matplotlib + alone or in any derivative version, provided, however, that MDT's + License Agreement and MDT's notice of copyright, i.e., "Copyright (c) + 2012- Matplotlib Development Team; All Rights Reserved" are retained in + matplotlib alone or in any derivative version prepared by + Licensee. + + 3. 
In the event Licensee prepares a derivative work that is based on or + incorporates matplotlib or any part thereof, and wants to + make the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to matplotlib . + + 4. MDT is making matplotlib available to Licensee on an "AS + IS" basis. MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB + WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + + 5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR + LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING + MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF + THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between MDT and + Licensee. This License Agreement does not grant permission to use MDT + trademarks or trade name in a trademark sense to endorse or promote + products or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using matplotlib , + Licensee agrees to be bound by the terms and conditions of this License + Agreement. + + License agreement for matplotlib versions prior to 1.3.0 + ======================================================== + + 1. This LICENSE AGREEMENT is between John D. Hunter ("JDH"), and the + Individual or Organization ("Licensee") accessing and otherwise using + matplotlib software in source or binary form and its associated + documentation. + + 2. Subject to the terms and conditions of this License Agreement, JDH + hereby grants Licensee a nonexclusive, royalty-free, world-wide license + to reproduce, analyze, test, perform and/or display publicly, prepare + derivative works, distribute, and otherwise use matplotlib + alone or in any derivative version, provided, however, that JDH's + License Agreement and JDH's notice of copyright, i.e., "Copyright (c) + 2002-2011 John D. Hunter; All Rights Reserved" are retained in + matplotlib alone or in any derivative version prepared by + Licensee. + + 3. In the event Licensee prepares a derivative work that is based on or + incorporates matplotlib or any part thereof, and wants to + make the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to matplotlib. + + 4. JDH is making matplotlib available to Licensee on an "AS + IS" basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB + WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + + 5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR + LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING + MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF + THE POSSIBILITY THEREOF. + + 6. 
This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between JDH and + Licensee. This License Agreement does not grant permission to use JDH + trademarks or trade name in a trademark sense to endorse or promote + products or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using matplotlib, + Licensee agrees to be bound by the terms and conditions of this License + Agreement. +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: Matplotlib +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: Education +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Scientific/Engineering :: Visualization +Project-URL: Homepage, https://matplotlib.org +Project-URL: Download, https://matplotlib.org/stable/install/index.html +Project-URL: Documentation, https://matplotlib.org +Project-URL: Source Code, https://github.com/matplotlib/matplotlib +Project-URL: Bug Tracker, https://github.com/matplotlib/matplotlib/issues +Project-URL: Forum, https://discourse.matplotlib.org/ +Project-URL: Donate, https://numfocus.org/donate-to-matplotlib +Requires-Python: >=3.10 +Requires-Dist: contourpy>=1.0.1 +Requires-Dist: cycler>=0.10 +Requires-Dist: fonttools>=4.22.0 +Requires-Dist: kiwisolver>=1.3.1 +Requires-Dist: numpy>=1.23 +Requires-Dist: packaging>=20.0 +Requires-Dist: pillow>=8 +Requires-Dist: pyparsing>=2.3.1 +Requires-Dist: python-dateutil>=2.7 +Provides-Extra: dev +Requires-Dist: meson-python<0.17.0,>=0.13.1; extra == "dev" +Requires-Dist: pybind11!=2.13.3,>=2.13.2; extra == "dev" +Requires-Dist: setuptools_scm>=7; extra == "dev" +Requires-Dist: setuptools>=64; extra == "dev" +Description-Content-Type: text/markdown + +[![PyPi](https://img.shields.io/pypi/v/matplotlib)](https://pypi.org/project/matplotlib/) +[![Conda](https://img.shields.io/conda/vn/conda-forge/matplotlib)](https://anaconda.org/conda-forge/matplotlib) +[![Downloads](https://img.shields.io/pypi/dm/matplotlib)](https://pypi.org/project/matplotlib) +[![NUMFocus](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](https://numfocus.org) + +[![Discourse help forum](https://img.shields.io/badge/help_forum-discourse-blue.svg)](https://discourse.matplotlib.org) +[![Gitter](https://badges.gitter.im/matplotlib/matplotlib.svg)](https://gitter.im/matplotlib/matplotlib) +[![GitHub issues](https://img.shields.io/badge/issue_tracking-github-blue.svg)](https://github.com/matplotlib/matplotlib/issues) +[![Contributing](https://img.shields.io/badge/PR-Welcome-%23FF8300.svg?)](https://matplotlib.org/stable/devel/index.html) + +[![GitHub actions status](https://github.com/matplotlib/matplotlib/workflows/Tests/badge.svg)](https://github.com/matplotlib/matplotlib/actions?query=workflow%3ATests) +[![Azure pipelines 
status](https://dev.azure.com/matplotlib/matplotlib/_apis/build/status/matplotlib.matplotlib?branchName=main)](https://dev.azure.com/matplotlib/matplotlib/_build/latest?definitionId=1&branchName=main) +[![AppVeyor status](https://ci.appveyor.com/api/projects/status/github/matplotlib/matplotlib?branch=main&svg=true)](https://ci.appveyor.com/project/matplotlib/matplotlib) +[![Codecov status](https://codecov.io/github/matplotlib/matplotlib/badge.svg?branch=main&service=github)](https://app.codecov.io/gh/matplotlib/matplotlib) +[![EffVer Versioning](https://img.shields.io/badge/version_scheme-EffVer-0097a7)](https://jacobtomlinson.dev/effver) + +![Matplotlib logotype](https://matplotlib.org/_static/logo2.svg) + +Matplotlib is a comprehensive library for creating static, animated, and +interactive visualizations in Python. + +Check out our [home page](https://matplotlib.org/) for more information. + +![image](https://matplotlib.org/_static/readme_preview.png) + +Matplotlib produces publication-quality figures in a variety of hardcopy +formats and interactive environments across platforms. Matplotlib can be +used in Python scripts, Python/IPython shells, web application servers, +and various graphical user interface toolkits. + +## Install + +See the [install +documentation](https://matplotlib.org/stable/users/installing/index.html), +which is generated from `/doc/install/index.rst`. + +## Contribute + +You've discovered a bug or something else you want to change — excellent! + +You've worked out a way to fix it — even better! + +You want to tell us about it — best of all! + +Start at the [contributing +guide](https://matplotlib.org/devdocs/devel/contribute.html)! + +## Contact + +[Discourse](https://discourse.matplotlib.org/) is the discussion forum +for general questions and discussions and our recommended starting +point. + +Our active mailing lists (which are mirrored on Discourse) are: + +- [Users](https://mail.python.org/mailman/listinfo/matplotlib-users) + mailing list: <matplotlib-users@python.org> +- [Announcement](https://mail.python.org/mailman/listinfo/matplotlib-announce) + mailing list: <matplotlib-announce@python.org> +- [Development](https://mail.python.org/mailman/listinfo/matplotlib-devel) + mailing list: <matplotlib-devel@python.org> + +[Gitter](https://gitter.im/matplotlib/matplotlib) is for coordinating +development and asking questions directly related to contributing to +matplotlib. + +## Citing Matplotlib + +If Matplotlib contributes to a project that leads to publication, please +acknowledge this by citing Matplotlib. + +[A ready-made citation +entry](https://matplotlib.org/stable/users/project/citing.html) is +available.
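The RECORD manifest that follows lists one `path,hash,size` entry per installed file; per PEP 376 and PEP 627, the hash field is the algorithm name, `=`, and the unpadded urlsafe-base64 digest, and the size field is the file length in bytes. A minimal sketch of checking one such entry (the `record_hash` helper and the sample paths below are illustrative assumptions, not part of this repository):

```python
import base64
import hashlib
from pathlib import Path

def record_hash(path: Path) -> str:
    # RECORD-style hash: "sha256=" plus the urlsafe base64 of the digest,
    # with the trailing "=" padding stripped (PEP 376 / PEP 627).
    digest = hashlib.sha256(path.read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# Hypothetical usage against a single RECORD line such as
#   matplotlib/cbook.py,sha256=HAr8...,80161
# site_packages = Path(".venv/lib/python3.10/site-packages")
# assert record_hash(site_packages / "matplotlib/cbook.py").startswith("sha256=")
```

Installers (here `uv`, as recorded earlier in this diff's INSTALLER file) write this manifest so that uninstalls and integrity checks can account for every installed file.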
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/RECORD b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..bf33d01e3ae8a31786cf028458e535437b58c03e --- /dev/null +++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/RECORD @@ -0,0 +1,596 @@ +matplotlib-3.10.3.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +matplotlib-3.10.3.dist-info/LICENSE,sha256=WhqB6jAXKMi7opM9qDLAzWIina8giToCSrPVMkRGjbw,4830 +matplotlib-3.10.3.dist-info/METADATA,sha256=qkW_YmnN-SloTyASHYvAbp53RWGAaintL1IWgoFW7XI,11268 +matplotlib-3.10.3.dist-info/RECORD,, +matplotlib-3.10.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib-3.10.3.dist-info/WHEEL,sha256=sZM_NeUMz2G4fDenMf11eikcCxcLaQWiYRmjwQBavQs,137 +matplotlib/__init__.py,sha256=bn_36OtZ4TCX21uj49jldcZ5VhbgOgpWGwQolyODom0,55633 +matplotlib/__init__.pyi,sha256=s0YJ8fkGVioZL9bcBtyYgCq_MbY8LRrQUtmaD3XaIxQ,3439 +matplotlib/_afm.py,sha256=cWe1Ib37T6ZyHbR6_hPuzAjotMmi32y-kDB-i28iyqE,16692 +matplotlib/_animation_data.py,sha256=JJJbbc-fMdPjkbQ7ng9BHL5i91VTDHQVTtEdWOvWBAI,7986 +matplotlib/_api/__init__.py,sha256=7Fs57FtnCmbyr_NwuGWe0EhfXOYrBw22iKMU9onGc_k,13799 +matplotlib/_api/__init__.pyi,sha256=XNL-oGkk1MZPtSXk3rHlC3jLWZxsElmpNpOongKq8qA,2246 +matplotlib/_api/deprecation.py,sha256=XJxksSV8ukS8kSVmsGPS0zgv1ffFa8WNaRLAk31dmOM,20091 +matplotlib/_api/deprecation.pyi,sha256=A8De57amX2GlZSrDYwVYCMxHFb8AsYGrH0k_bCxuJus,2217 +matplotlib/_blocking_input.py,sha256=VHNsxvX2mTx_xBknd30MSicVlRXS4dCDe9hDctbV5rk,1224 +matplotlib/_c_internal_utils.cpython-310-x86_64-linux-gnu.so,sha256=ZDwgjzyxQELgtvPYKqMfsJT8VvYb7s7gqrAAHMRFgKs,251392 +matplotlib/_c_internal_utils.pyi,sha256=Z3bLs9pMGXrmZjt-4_A-x4321bLP-B54xDbr4PIgUfc,377 +matplotlib/_cm.py,sha256=PuYIAkUpz4u4aiUjvdV5njIfG0J_MQY9pM2Yz3j3KXs,68014 +matplotlib/_cm_bivar.py,sha256=gpmKiSxsWoVGWIifseh0goND7Y7zTWJKsznr_QkEtDg,97461 +matplotlib/_cm_listed.py,sha256=3a02mPUSnOsXkqRFNxKGwIvJCAKY5lezBUrqGcllnvk,135004 +matplotlib/_cm_multivar.py,sha256=0UjNFW7Sytj1t31QD99ebfr-al7NmCpWhKUOyq9VzK8,6630 +matplotlib/_color_data.py,sha256=k-wdTi6ArJxksqBfMT-7Uy2qWz8XX4Th5gsjf32CwmM,34780 +matplotlib/_color_data.pyi,sha256=RdBRk01yuf3jYVlCwG351tIBCxehizkZMnKs9c8gnOw,170 +matplotlib/_constrained_layout.py,sha256=Oci1LYPuos_Mb8YxTMTgfae10UqNV7fdD3qugm8NvNA,31403 +matplotlib/_docstring.py,sha256=u9yJorJidI8k1W8S01SIMqNmh5VOMVc-AqyES66EEdk,4435 +matplotlib/_docstring.pyi,sha256=6ze5DoqZaFy6BQ5Z8vtDiJwSOXhBcpZJlkM5enI9cuE,800 +matplotlib/_enums.py,sha256=euD2sj2FIbQMzIPA4rCrII_y8RVzaMyEBKsSdBQaUb4,6175 +matplotlib/_enums.pyi,sha256=K7j_kDwGOnx37CYnXhwfJ9NJBGeet45KvAj0om05RUs,326 +matplotlib/_fontconfig_pattern.py,sha256=2livocARMbpys8tmicH6wlifcwcNoIstgruSY6DSAfk,4361 +matplotlib/_image.cpython-310-x86_64-linux-gnu.so,sha256=1PNEPVC5HC-HInlvdum4FkQBUbt4NnFzCRyfq0gKJNo,555016 +matplotlib/_image.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/_internal_utils.py,sha256=nhK6LLWYW93fBcsFiO09JmqFj2rgHEsGYFOeaC7HRKw,2140 +matplotlib/_layoutgrid.py,sha256=mFB9asZVol2aV9hLBpdntuG5x1isKUrajF-3TUhCgso,21676 +matplotlib/_mathtext.py,sha256=JMhC1YX6U-Go409F3JsDXpao-6iP3H93XPXPiQESP_c,107842 +matplotlib/_mathtext_data.py,sha256=9y__7jf3bzgOmD2lEYW3QpgsT-z9QDuiwRLVH7Wq8Pw,65067 
+matplotlib/_path.cpython-310-x86_64-linux-gnu.so,sha256=aiihJK8nkcXr3rc2Cryx2bwacBpj4EK40hXmj_Ja5VU,486904 +matplotlib/_path.pyi,sha256=yznyfzoUogH9vvi0vK68ga4Shlbrn5UBhAnLX8Ght1o,325 +matplotlib/_pylab_helpers.py,sha256=pJERytHDmXo2VP3sH9Qw6NwJP0hefNPQMUXCVntnoHI,4307 +matplotlib/_pylab_helpers.pyi,sha256=7OZKr-OL3ipVt1EDZ6e-tRwHASz-ijYfcIdlPczXhvQ,1012 +matplotlib/_qhull.cpython-310-x86_64-linux-gnu.so,sha256=8RL7aYZmfgOLNoz6u3zu5Hm0PFz97AJeIts6sQyqfxU,745280 +matplotlib/_qhull.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/_text_helpers.py,sha256=nz1pvEp8756PCrn6KIY2M6vVA-beYUNODvvf4NnGF0g,2538 +matplotlib/_tight_bbox.py,sha256=ddJ5ViXulbPLocHr-RkWK27WJSuV2WXUx74jZyL0NOg,2787 +matplotlib/_tight_layout.py,sha256=A3vZKBmxci7w35OO9lxgfUqrRKgTr-N_dUza0M05WxE,12675 +matplotlib/_tri.cpython-310-x86_64-linux-gnu.so,sha256=XsEXp77lnqLPRzEcwFFwJSu9wbZIUe4V_YDad3_DVQo,411008 +matplotlib/_tri.pyi,sha256=bFnu97pIlSboJT3ijQBw4YlZdlR1UzXYtI3DkEPVzeM,1429 +matplotlib/_type1font.py,sha256=c_PXvyOFQ4YCRQJ23snWNkdEhuZ4k0JbLe55zaeoXfQ,28409 +matplotlib/_version.py,sha256=g0JScnasC4-IPTQGVDcnvq-usEhkFWYJroH5FWeyAEI,19 +matplotlib/animation.py,sha256=58vUSy_0M2Ve2GO5JcXsCHxCwr7TqaaMed-KNxN6Nno,73114 +matplotlib/animation.pyi,sha256=C8Fgmswh4um-qYgqPZNBEpXVaefRXi6e026pYjonLHg,6566 +matplotlib/artist.py,sha256=hfCGETRkHLfP-i4PmmNsc7syXyFTZi4HYvzyE8q9ak0,63293 +matplotlib/artist.pyi,sha256=9sR0w8Kvi1GaFV6IbUzOw9LvQkPEB8FP_uRvUcI6_hg,7336 +matplotlib/axes/__init__.py,sha256=aHE_zIjphIJkW4_1fyoFuEWTb7gCRewK5jNZWJHEdgM,351 +matplotlib/axes/__init__.pyi,sha256=HP1z2v-PboHQS4dQjvJ7XjUjX-zw6taZRTTB9oVKwYE,303 +matplotlib/axes/_axes.py,sha256=I7TJgEB4v0DGYPVwN-2FkYHREinTLKB0VyHBVvQxi8s,353757 +matplotlib/axes/_axes.pyi,sha256=fvlVj8AnKt52EOCgxJ5N_6_lb9n0gmXlOP7WGf2puNI,26116 +matplotlib/axes/_base.py,sha256=oqAAqzpqcI8PCdM3AC-cYGvXtuUwgdBBiwf88y1qVuE,185372 +matplotlib/axes/_base.pyi,sha256=CGHbNwWXY1wIT4bGqRBc0JJ9vUkYD29tkmbA3Ulw-co,17051 +matplotlib/axes/_secondary_axes.py,sha256=Sew-LK57mDndUXb-9_VHzs5dJLQ6fh4nstZtfpZjui0,11887 +matplotlib/axes/_secondary_axes.pyi,sha256=GtU55YzLNN7XHMRQcAmcAxcdcN-FzHw1RAJpOiTcZFk,1414 +matplotlib/axis.py,sha256=bsPEyn4eMAnkTCbW4rxSm8ysPNYH2p_ur7RN_ZfZRps,104714 +matplotlib/axis.pyi,sha256=UO1-oCeHyB7Mrxxnuo0Qys2u_k8e6qTmz4B6-rHrWdw,10181 +matplotlib/backend_bases.py,sha256=-41KSWRkMNlTne6c5q-hOOn4vW6rdNtOtA1GIIuu7rY,131838 +matplotlib/backend_bases.pyi,sha256=G6MqvmWf-y2SwhuhNF2qN8hmovxAkV1TJMbg4LzU9cM,16270 +matplotlib/backend_managers.py,sha256=RQheCO_cQBlaWsYMbAmswu0UPKU7bmLTI5LEFgotklA,11795 +matplotlib/backend_managers.pyi,sha256=agnuM0wiZRqSgqti2AgbKJijRLvEPNLOrSY8PEwLjFE,2253 +matplotlib/backend_tools.py,sha256=eCx84l8JBYayju5Ry1LUFyKLIrdgxmGUTJw72VNP1yo,33186 +matplotlib/backend_tools.pyi,sha256=lc3W6FcY0XhTb8Z8XAqQ6MBk_SV7WH41t7zjwAlQfmE,4122 +matplotlib/backends/__init__.py,sha256=JowJe-tDrUBMNJTiJATgiEuACpgdxsKnRCYa-nC255A,206 +matplotlib/backends/_backend_agg.cpython-310-x86_64-linux-gnu.so,sha256=46NhH51zmGijS3Z_uFOHxrIAYowUfa6IGnugIkTGCyg,741680 +matplotlib/backends/_backend_agg.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/backends/_backend_gtk.py,sha256=zNCmMOgjgZGfAIlxDU4ph02XMSSn0wIUoPQ4wsaeeAg,11274 +matplotlib/backends/_backend_pdf_ps.py,sha256=M0kjQpMlBoAim6Pdw8u3FqUIoXz4UwRv4mSzXPJPI44,5968 +matplotlib/backends/_backend_tk.py,sha256=q8bdB4u3vGROJsYNWQv2WmyrEy1zBavcjfmKI-MSKp8,43291 +matplotlib/backends/_macosx.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+matplotlib/backends/_tkagg.cpython-310-x86_64-linux-gnu.so,sha256=mYgtDi3PitMdGcRmmj-NCs9_VHGdoI040HGxBlOqk6c,279120 +matplotlib/backends/_tkagg.pyi,sha256=1yKnYKSgNFesrb0ZI5RQCyTO4zONgHW5MrJ9vq4HzyI,379 +matplotlib/backends/backend_agg.py,sha256=Py_rXRbDTP0ZSrNB2O5Tmh5V8G5V9ViXH_YV5PTX9xw,19888 +matplotlib/backends/backend_cairo.py,sha256=Ah5M85Ppa8zYGOtLOizW0gzltjs3b4nD56qFeQR795U,18618 +matplotlib/backends/backend_gtk3.py,sha256=ZHf4CKhDGJvl8J3p6Sx_ZJBTgVetZBbaKWfnXKvNe0c,22277 +matplotlib/backends/backend_gtk3agg.py,sha256=aFJw05L5V-FyFSj_20l_FuC5piPf8fhKzft_m1NMhYc,2463 +matplotlib/backends/backend_gtk3cairo.py,sha256=zPsJzVm750if2LiQ9ybzsbX0rBhkP05XVvR9Lhz65so,1392 +matplotlib/backends/backend_gtk4.py,sha256=3KWHdU9UghUjEt2v3wgQNm7Yr-DrLVBYWcKNaoyfIYc,23026 +matplotlib/backends/backend_gtk4agg.py,sha256=00i3qpIt9Tcf_S74GOWbeckiPlfVJoQ2pBbhXDMthF0,1262 +matplotlib/backends/backend_gtk4cairo.py,sha256=sqWm3WgfNO8EsBjqzsD4U4cAc0f4q5SufYx7ZJacDf0,1125 +matplotlib/backends/backend_macosx.py,sha256=Gjbu1MKXIZpCM_luMG6U7qDgwotcGSegCvYw-NNWxeQ,7397 +matplotlib/backends/backend_mixed.py,sha256=Gf_2BDjy94FZRXALm0X3xRpK_1l8bZGSYCqnrSjrjME,4696 +matplotlib/backends/backend_nbagg.py,sha256=Au9RHfRufpI0ngT4R0K0CUVtAMFi9Bg-YhDunlj_Lko,8000 +matplotlib/backends/backend_pdf.py,sha256=RCpic_1TWngt6ZC4YfscnSXXTndAXWt9qFl0v1aLiiM,105317 +matplotlib/backends/backend_pgf.py,sha256=GT7UHkAtHtnZcyV0GxHtUL9TTre9dji7Q6qexyi4AUU,39567 +matplotlib/backends/backend_ps.py,sha256=JRZZ6hsPcCfZmb-HczqyBulgow15yvoaHQe3Zcl1Zd4,51991 +matplotlib/backends/backend_qt.py,sha256=wEkQo5b5BQ6uzAXzLWIKBI1gINZx6-Y7h0jTJ3kkuMI,41541 +matplotlib/backends/backend_qt5.py,sha256=kzfoo2ksEGsiWAa2LGtZYzKvfzqJJWyGOohohcRAu1g,787 +matplotlib/backends/backend_qt5agg.py,sha256=Vh7H8kqWH4X8a3VX2XZ2Vze9srwJavkNHAZxdJUz_bk,352 +matplotlib/backends/backend_qt5cairo.py,sha256=Go2Y0GVkXh1xh6x4F255_e5Xbwwws-OiD1Fc0805E78,292 +matplotlib/backends/backend_qtagg.py,sha256=ZjPtp5wR6tZGjbngPXRdVXYRhiPPrc5C0q2DmtdRkpY,3413 +matplotlib/backends/backend_qtcairo.py,sha256=e3SUG50VGqo68eS_8ebTCVQPa4AaxLxuo1JiWX4TIWg,1770 +matplotlib/backends/backend_svg.py,sha256=RQvPE9pR0xSu55NIMajX1WBqmmXbUWWwdWr2qMG-63k,51000 +matplotlib/backends/backend_template.py,sha256=qhWvXGiPeO2jvH61L3NCRYLReo--d4WJrQYanIaEmiE,8012 +matplotlib/backends/backend_tkagg.py,sha256=z9gB16fq2d-DUNpbeSDDLWaYmc0Jz3cDqNlBKhnQg0c,592 +matplotlib/backends/backend_tkcairo.py,sha256=JaGGXh8Y5FwVZtgryIucN941Olf_Pn6f4Re7Vuxl1-c,845 +matplotlib/backends/backend_webagg.py,sha256=YzNscpsID8yu0XYO3NXiZLMPZnDKMAKdG_TO2Ya3g4k,11022 +matplotlib/backends/backend_webagg_core.py,sha256=RZisxLS_pocuBRr5_lqBnF91flQA5upI8PiP8-Zitl0,18748 +matplotlib/backends/backend_wx.py,sha256=Y3IM1DEtPTeXOqkyLIGmNuKqDReyHMTtHzqTuwdfPhM,51296 +matplotlib/backends/backend_wxagg.py,sha256=tzcwYyW34j4LPfHm9uhuHwepwZIcspi3y8oPC8FJkdk,1468 +matplotlib/backends/backend_wxcairo.py,sha256=TK-m3S0c1WipfKE2IpIPNeE4hoXPjfMvnWAzHpCXpFs,848 +matplotlib/backends/qt_compat.py,sha256=t_4aD4rg5maItXABLtVG6JIfuQwZnHwhMcjtz6AUwQs,5346 +matplotlib/backends/qt_editor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/backends/qt_editor/_formlayout.py,sha256=QmqqqLO6waqeSGOKjDNUwjvon53Z7yqil5AfqfftDWY,20953 +matplotlib/backends/qt_editor/figureoptions.py,sha256=ljd7fCBiE0kk2ATNTaB5Dg8U6Gn7Lz4Vhi3XX-VbXeE,9851 +matplotlib/backends/registry.py,sha256=M2BHhBKbUJ8E2B2QExfVf8NbAK4ZFhJtmKRelBZZDh4,15480 +matplotlib/backends/web_backend/all_figures.html,sha256=44Y-GvIJbNlqQaKSW3kwVKpTxBSG1WsdYz3ZmYHlUsA,1753 
+matplotlib/backends/web_backend/css/boilerplate.css,sha256=qui16QXRnQFNJDbcMasfH6KtN9hLjv8883U9cJmsVCE,2310 +matplotlib/backends/web_backend/css/fbm.css,sha256=wa4vNkNv7fQ_TufjJjecFZEzPMR6W8x6uXJga_wQILw,1456 +matplotlib/backends/web_backend/css/mpl.css,sha256=ruca_aA5kNnP-MZmLkriu8teVP1nIgwcFEpoB16j8Z4,1611 +matplotlib/backends/web_backend/css/page.css,sha256=ca3nO3TaPw7865PN5SlGJBTc2H3rBXQCMaFPywX29y4,1623 +matplotlib/backends/web_backend/ipython_inline_figure.html,sha256=wgSxUh3xpPAxOnZgSMnrhDM5hYncOfWRGgaCUezvedY,1311 +matplotlib/backends/web_backend/js/mpl.js,sha256=YhJ_95_irTNrwlnDMRWGBebRb0DAhVBzj_96fL9Dsqs,24408 +matplotlib/backends/web_backend/js/mpl_tornado.js,sha256=Zs2Uzs7YUilG765nYvanCo-IK8HkHDtIum1KAq6bQ_w,302 +matplotlib/backends/web_backend/js/nbagg_mpl.js,sha256=F-By4ZjOSmwpNAkxUUxUk35qCjGlf0B28Z0aOyfpxDM,9514 +matplotlib/backends/web_backend/single_figure.html,sha256=wEBwF602JLHErBUEXiS6jXqmxYAIzHpa3MMFrnev6rs,1357 +matplotlib/bezier.py,sha256=ahJ1BRJoTbcxJoNmF1mCYc96mWs5jSM4DOK58jYaXkU,19049 +matplotlib/bezier.pyi,sha256=itubQt_U1IKnQ508bhy6J8LSpaspvZIeOmJqg7JvOmU,2586 +matplotlib/category.py,sha256=UsZ8rbADAH96YfpjD-N1DLTcS_cQwlszIIQ2ZRaiWm4,7377 +matplotlib/cbook.py,sha256=HAr8_jGa7m2Dg87TIiKqrVC-IrtbLPkiCK3AMPBOPLw,80161 +matplotlib/cbook.pyi,sha256=YkPZz_bsTYQJQ_wKx-qbsJYLhy2t2nCyvLNjJyiRtpw,6037 +matplotlib/cm.py,sha256=5xav-BvJ4pWSVdvGql4oYc4EpA-R6OuUP5xjrZCMKvg,10350 +matplotlib/cm.pyi,sha256=Ot5j2zDHYTra4kKuTwxYp7X5TsNwg_cJuYa7BxmQz-A,939 +matplotlib/collections.py,sha256=5AcX3lTsKD1ObSOkLr-irzYPfdJxsNrgw5dAD886vCE,96311 +matplotlib/collections.pyi,sha256=uZ4fPdq34oghdEtS1UkY7P0oOE2oPUzvS0Cr09yktaQ,10775 +matplotlib/colorbar.py,sha256=LVhceZdbo6km3N6d8gyJGDnpmE2xDgsPLo3CE7PzgCQ,60620 +matplotlib/colorbar.pyi,sha256=C1NzjMWFKm01rPUUr15Yjz1WB0pGakJQeatJO7G0mNs,4966 +matplotlib/colorizer.py,sha256=RzE0zjqB6elpketKidy6n-RRDbYF98speEIOIMfIFb4,25180 +matplotlib/colorizer.pyi,sha256=B_5-rEvNo4DblSifLodf3OVVEc3bTGsIYqqaQdU_4Tw,3308 +matplotlib/colors.py,sha256=h1YV59y4WvstOPC0x8eZzwHYItal7VjjVcyVS3niaSM,137303 +matplotlib/colors.pyi,sha256=XhCi3MtgSozOY5FATtPUHyKO4xMrvi7TvbCAl714Uxc,14908 +matplotlib/container.py,sha256=Y6v4j79gMk8QDYfrdOqbbJHH0BoOIO9enz4dtlaBSJU,4565 +matplotlib/container.pyi,sha256=DdthHVj1-bQQV3VcpMD1uIPLXVUKyKhWMXy8yCniK1I,1805 +matplotlib/contour.py,sha256=wpMtrMZEiDYPRuFbZ592HvQqn4NOBySfM17detKoenY,68391 +matplotlib/contour.pyi,sha256=X4DSH0u1YFUvKxuoAv8YbmVOJTJ-wbHig4lznzhsI0U,5300 +matplotlib/dates.py,sha256=xfcFY3g3AsW9isnEIKFGj4gXq0Xeoxox_3fxNCDQ01c,66306 +matplotlib/dviread.py,sha256=BqQg0fqXD_TmABJPSWDC-H2azt7B53NfOE1SC6mbXiI,42590 +matplotlib/dviread.pyi,sha256=5fps3GiPf1ibKY1TH6__kNEiJ9xhpVgzgait6jo09Hc,2139 +matplotlib/figure.py,sha256=CNPMl0QCxhr3h3WGcd271pE4Xtv4rapRWuUSFSNJSuc,141862 +matplotlib/figure.pyi,sha256=HWw1H_xICfG748BpSP32SVgF_mATqEzOdDZ87aY8X-A,14738 +matplotlib/font_manager.py,sha256=tSpOlZqiNUdIn3gQCFN_-nOLZ08WXBqj_unP6o1JyIw,57651 +matplotlib/font_manager.pyi,sha256=Jse1IshOdjhXHdjcvTxtos3rgsuECbRJzc264A-ZGus,5052 +matplotlib/ft2font.cpython-310-x86_64-linux-gnu.so,sha256=yuZFHHrQG6H_lm9sxouyeLljAXwd8Md4bq6K7YEGTgs,1423392 +matplotlib/ft2font.pyi,sha256=yuDEfdw-7VyFuAHts-sMRm4VKV7IxYresBxEQgNOaPM,9253 +matplotlib/gridspec.py,sha256=s6RkXEk4ORWsMHjXkK98ZvosZ5D-qgenVNk0hIvMwOw,29786 +matplotlib/gridspec.pyi,sha256=3IQi_Q5KdwyPT_J_FWcngJ7iUOOCH4PSm42USm16Ofg,5099 +matplotlib/hatch.py,sha256=tfEM0DQxyLkUE87lHduMNlE55GKINtAZo4Y_YsAmSJo,7453 +matplotlib/hatch.pyi,sha256=OEFkNe8TgrBli_MDMW99Tvhr44S4ySPaYQomjVXpUUA,2098 
+matplotlib/image.py,sha256=MLFg06J2xllm1tmgDnlLYw8gTy7__NGnkf4SZP-3mSs,69765 +matplotlib/image.pyi,sha256=R88aGGu6wP0lFKRudaM7iNxGgq4kh9uX9QRaZSorTLQ,7066 +matplotlib/inset.py,sha256=fJV8VMMY07_97iJR3_RO892WAKE7myfV_Gqg-qK99vg,10154 +matplotlib/inset.pyi,sha256=c-IVW14bpqb3qu7bz0EabvNXJnZwJAXEXoQoIcrL8ns,968 +matplotlib/layout_engine.py,sha256=mwRNh13mIQ-wpjQVgBaB_LWKN1AiQTWldg-X9nLmVok,11433 +matplotlib/layout_engine.pyi,sha256=9cKFTFPCvwmKIygTUvrvOq1iWnFYme9Shniiv2QbC74,1788 +matplotlib/legend.py,sha256=sCnDN8KNo6mYcYqhdQynNUStFvrWbMMGtv_rxgua5Qc,55311 +matplotlib/legend.pyi,sha256=hn-MNF3SPHtSUqIhwVXTebU_Nzk_wIh5iKgf7AEAZRg,5364 +matplotlib/legend_handler.py,sha256=lW8sQxdFW4-Bup0JUZvlpiIrAvczeNm6ih5BlCrdQnM,29931 +matplotlib/legend_handler.pyi,sha256=3VEfeioGIAxhd3mhg4PXETZjCKf4OlXL0jz1MAFGtos,7655 +matplotlib/lines.py,sha256=BG3xhTuzYDFjzf-k5CcQc_V7W4HlQl4CL7RCQQz0EC8,57920 +matplotlib/lines.pyi,sha256=3tG7tD8GZ8YPphw4nmkgS9SfdLV8icW36bv61jb9RlU,6081 +matplotlib/markers.py,sha256=g6ukerZ5n_sD_Enk0eJA4kkTvOOp8AfSGBq60PHot-E,33708 +matplotlib/markers.pyi,sha256=FFFBsvilnbd8-5L04U70kfVoyBwc18w_fZ6DysTj9p0,1678 +matplotlib/mathtext.py,sha256=yTlpUfnKxzrFl7XNjyuC-xMBBnxUufU6GVwQyhQAI2o,5104 +matplotlib/mathtext.pyi,sha256=RCVxYGQ_CJ6wC7v_HkqoouU2PhtcvlJ1ffIyxzAC-so,1045 +matplotlib/mlab.py,sha256=T71p4x0KJNVGrwUuoDdgcjWQucIQTuVrocOC98Zdwsc,30210 +matplotlib/mlab.pyi,sha256=mkR7wbJS9eCQfCFsUWoXnqrAy0kcE3cVqpFZCeOGC_Q,3583 +matplotlib/mpl-data/fonts/afm/cmex10.afm,sha256=blR3ERmrVBV5XKkAnDCj4NMeYVgzH7cXtJ3u59u9GuE,12070 +matplotlib/mpl-data/fonts/afm/cmmi10.afm,sha256=5qwEOpedEo76bDUahyuuF1q0cD84tRrX-VQ4p3MlfBo,10416 +matplotlib/mpl-data/fonts/afm/cmr10.afm,sha256=WDvgC_D3UkGJg9u-J0U6RaT02lF4oz3lQxHtg1r3lYw,10101 +matplotlib/mpl-data/fonts/afm/cmsy10.afm,sha256=AbmzvCVWBceHRfmRfeJ9E6xzOQTFLk0U1zDfpf3_MaM,8295 +matplotlib/mpl-data/fonts/afm/cmtt10.afm,sha256=4ji7_mTpeWMa93o_UHBWPKCnqsBfhJJNllat1lJArP4,6501 +matplotlib/mpl-data/fonts/afm/pagd8a.afm,sha256=jjFrigwkTpYLqa26cpzZvKQNBo-PuF4bmDVqaM4pMWw,17183 +matplotlib/mpl-data/fonts/afm/pagdo8a.afm,sha256=sgNQdeYyx8J-itGw9h31y95aMBiTCRvmNSPTXwwS7xg,17255 +matplotlib/mpl-data/fonts/afm/pagk8a.afm,sha256=ZUtfHPloNqcvGMHMxaKDSlshhOcjwheUx143RwpGdIU,17241 +matplotlib/mpl-data/fonts/afm/pagko8a.afm,sha256=Yj1wBg6Jsqqz1KBfhRoJ3ACR-CMQol8Fj_ZM5NZ1gDk,17346 +matplotlib/mpl-data/fonts/afm/pbkd8a.afm,sha256=Zl5o6J_di9Y5j2EpHtjew-_sfg7-WoeVmO9PzOYSTUc,15157 +matplotlib/mpl-data/fonts/afm/pbkdi8a.afm,sha256=JAOno930iTyfZILMf11vWtiaTgrJcPpP6FRTRhEMMD4,15278 +matplotlib/mpl-data/fonts/afm/pbkl8a.afm,sha256=UJqJjOJ6xQDgDBLX157mKpohIJFVmHM-N6x2-DiGv14,15000 +matplotlib/mpl-data/fonts/afm/pbkli8a.afm,sha256=AWislZ2hDbs0ox_qOWREugsbS8_8lpL48LPMR40qpi0,15181 +matplotlib/mpl-data/fonts/afm/pcrb8a.afm,sha256=6j1TS2Uc7DWSc-8l42TGDc1u0Fg8JspeWfxFayjUwi8,15352 +matplotlib/mpl-data/fonts/afm/pcrbo8a.afm,sha256=smg3mjl9QaBDtQIt06ko5GvaxLsO9QtTvYANuE5hfG0,15422 +matplotlib/mpl-data/fonts/afm/pcrr8a.afm,sha256=7nxFr0Ehz4E5KG_zSE5SZOhxRH8MyfnCbw-7x5wu7tw,15339 +matplotlib/mpl-data/fonts/afm/pcrro8a.afm,sha256=NKEz7XtdFkh9cA8MvY-S3UOZlV2Y_J3tMEWFFxj7QSg,15443 +matplotlib/mpl-data/fonts/afm/phvb8a.afm,sha256=NAx4M4HjL7vANCJbc-tk04Vkol-T0oaXeQ3T2h-XUvM,17155 +matplotlib/mpl-data/fonts/afm/phvb8an.afm,sha256=8e_myD-AQkNF7q9XNLb2m76_lX2TUr3a5wog_LIE1sk,17086 +matplotlib/mpl-data/fonts/afm/phvbo8a.afm,sha256=8fkBRmJ-SWY2YrBg8fFyjJyrJp8daQ6JPO6LvhM8xPI,17230 +matplotlib/mpl-data/fonts/afm/phvbo8an.afm,sha256=aeVRvV4r15BBvxuRJ0MG8ZHuH2HViuIiCYkvuapmkmM,17195 
+matplotlib/mpl-data/fonts/afm/phvl8a.afm,sha256=IyMYM-bgl-gI6rG0EuZZ2OLzlxJfGeSh8xqsh0t-eJQ,15627 +matplotlib/mpl-data/fonts/afm/phvlo8a.afm,sha256=s12C-eNnIDHJ_UVbuiprjxBjCiHIbS3Y8ORTC-qTpuI,15729 +matplotlib/mpl-data/fonts/afm/phvr8a.afm,sha256=Kt8KaRidts89EBIK29X2JomDUEDxvroeaJz_RNTi6r4,17839 +matplotlib/mpl-data/fonts/afm/phvr8an.afm,sha256=lL5fAHTRwODl-sB5mH7IfsD1tnnea4yRUK-_Ca2bQHM,17781 +matplotlib/mpl-data/fonts/afm/phvro8a.afm,sha256=3KqK3eejiR4hIFBUynuSX_4lMdE2V2T58xOF8lX-fwc,17919 +matplotlib/mpl-data/fonts/afm/phvro8an.afm,sha256=Vx9rRf3YfasMY7tz-njSxz67xHKk-fNkN7yBi0X2IP0,17877 +matplotlib/mpl-data/fonts/afm/pncb8a.afm,sha256=aoXepTcDQtQa_mspflMJkEFKefzXHoyjz6ioJVI0YNc,16028 +matplotlib/mpl-data/fonts/afm/pncbi8a.afm,sha256=pCWW1MYgy0EmvwaYsaYJaAI_LfrsKmDANHu7Pk0RaiU,17496 +matplotlib/mpl-data/fonts/afm/pncr8a.afm,sha256=0CIB2BLe9r-6_Wl5ObRTTf98UOrezmGQ8ZOuBX5kLks,16665 +matplotlib/mpl-data/fonts/afm/pncri8a.afm,sha256=5R-pLZOnaHNG8pjV6MP3Ai-d2OTQYR_cYCb5zQhzfSU,16920 +matplotlib/mpl-data/fonts/afm/pplb8a.afm,sha256=3EzUbNnXr5Ft5eFLY00W9oWu59rHORgDXUuJaOoKN58,15662 +matplotlib/mpl-data/fonts/afm/pplbi8a.afm,sha256=X_9tVspvrcMer3OS8qvdwjFFqpAXYZneyCL2NHA902g,15810 +matplotlib/mpl-data/fonts/afm/pplr8a.afm,sha256=ijMb497FDJ9nVdVMb21F7W3-cu9sb_9nF0oriFpSn8k,15752 +matplotlib/mpl-data/fonts/afm/pplri8a.afm,sha256=8KITbarcUUMi_hdoRLLmNHtlqs0TtOSKqtPFft7X5nY,15733 +matplotlib/mpl-data/fonts/afm/psyr.afm,sha256=Iyt8ajE4B2Tm34oBj2pKtctIf9kPfq05suQefq8p3Ro,9644 +matplotlib/mpl-data/fonts/afm/ptmb8a.afm,sha256=bL1fA1NC4_nW14Zrnxz4nHlXJb4dzELJPvodqKnYeMg,17983 +matplotlib/mpl-data/fonts/afm/ptmbi8a.afm,sha256=-_Ui6XlKaFTHEnkoS_-1GtIr5VtGa3gFQ2ezLOYHs08,18070 +matplotlib/mpl-data/fonts/afm/ptmr8a.afm,sha256=IEcsWcmzJyjCwkgsw4o6hIMmzlyXUglJat9s1PZNnEU,17942 +matplotlib/mpl-data/fonts/afm/ptmri8a.afm,sha256=49fQMg5fIGguZ7rgc_2styMK55Pv5bPTs7wCzqpcGpk,18068 +matplotlib/mpl-data/fonts/afm/putb8a.afm,sha256=qMaHTdpkrNL-m4DWhjpxJCSmgYkCv1qIzLlFfM0rl40,21532 +matplotlib/mpl-data/fonts/afm/putbi8a.afm,sha256=g7AVJyiTxeMpNk_1cSfmYgM09uNUfPlZyWGv3D1vcAk,21931 +matplotlib/mpl-data/fonts/afm/putr8a.afm,sha256=XYmNC5GQgSVAZKTIYdYeNksE6znNm9GF_0SmQlriqx0,22148 +matplotlib/mpl-data/fonts/afm/putri8a.afm,sha256=i7fVe-iLyLtQxCfAa4IxdxH-ufcHmMk7hbCGG5TxAY4,21891 +matplotlib/mpl-data/fonts/afm/pzcmi8a.afm,sha256=wyuoIWEZOcoXrSl1tPzLkEahik7kGi91JJj-tkFRG4A,16250 +matplotlib/mpl-data/fonts/afm/pzdr.afm,sha256=MyjLAnzKYRdQBfof1W3k_hf30MvqOkqL__G22mQ5xww,9467 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier-Bold.afm,sha256=sIDDI-B82VZ3C0mI_mHFITCZ7PVn37AIYMv1CrHX4sE,15333 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier-BoldOblique.afm,sha256=zg61QobD3YU9UBfCXmvmhBNaFKno-xj8sY0b2RpgfLw,15399 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier-Oblique.afm,sha256=vRQm5j1sTUN4hicT1PcVZ9P9DTTUHhEzfPXqUUzVZhE,15441 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier.afm,sha256=Mdcq2teZEBJrIqVXnsnhee7oZnTs6-P8_292kWGTrw4,15335 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-Bold.afm,sha256=i2l4gcjuYXoXf28uK7yIVwuf0rnw6J7PwPVQeHj5iPw,69269 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-BoldOblique.afm,sha256=Um5O6qK11DXLt8uj_0IoWkc84TKqHK3bObSKUswQqvY,69365 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-Oblique.afm,sha256=hVYDg2b52kqtbVeCzmiv25bW1yYdpkZS-LXlGREN2Rs,74392 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica.afm,sha256=23cvKDD7bQAJB3kdjSahJSTZaUOppznlIO6FXGslyW8,74292 +matplotlib/mpl-data/fonts/pdfcorefonts/Symbol.afm,sha256=P5UaoXr4y0qh4SiMa5uqijDT6ZDr2-jPmj1ayry593E,9740 
+matplotlib/mpl-data/fonts/pdfcorefonts/Times-Bold.afm,sha256=cQTmr2LFPwKQE_sGQageMcmFicjye16mKJslsJLHQyE,64251 +matplotlib/mpl-data/fonts/pdfcorefonts/Times-BoldItalic.afm,sha256=pzWOdycm6RqocBWgAVY5Jq0z3Fp7LuqWgLNMx4q6OFw,59642 +matplotlib/mpl-data/fonts/pdfcorefonts/Times-Italic.afm,sha256=bK5puSMpGT_YUILwyJrXoxjfj7XJOdfv5TQ_iKsJRzw,66328 +matplotlib/mpl-data/fonts/pdfcorefonts/Times-Roman.afm,sha256=hhNrUnpazuDDKD1WpraPxqPWCYLrO7D7bMVOg-zI13o,60460 +matplotlib/mpl-data/fonts/pdfcorefonts/ZapfDingbats.afm,sha256=ZuOmt9GcKofjdOq8kqhPhtAIhOwkL2rTJTmZxAjFakA,9527 +matplotlib/mpl-data/fonts/pdfcorefonts/readme.txt,sha256=MRv8ppSITYYAb7lt5EOw9DWWNZIblfxsFhu5TQE7cpI,828 +matplotlib/mpl-data/fonts/ttf/DejaVuSans-Bold.ttf,sha256=sYS4njwQdfIva3FXW2_CDUlys8_TsjMiym_Vltyu8Wc,704128 +matplotlib/mpl-data/fonts/ttf/DejaVuSans-BoldOblique.ttf,sha256=bt8CgxYBhq9FHL7nHnuEXy5Mq_Jku5ks5mjIPCVGXm8,641720 +matplotlib/mpl-data/fonts/ttf/DejaVuSans-Oblique.ttf,sha256=zN90s1DxH9PdV3TeUOXmNGoaXaH1t9X7g1kGZel6UhM,633840 +matplotlib/mpl-data/fonts/ttf/DejaVuSans.ttf,sha256=P99pyr8GBJ6nCgC1kZNA4s4ebQKwzDxLRPtoAb0eDSI,756072 +matplotlib/mpl-data/fonts/ttf/DejaVuSansDisplay.ttf,sha256=ggmdz7paqGjN_CdFGYlSX-MpL3N_s8ngMozpzvWWUvY,25712 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-Bold.ttf,sha256=uq2ppRcv4giGJRr_BDP8OEYZEtXa8HKH577lZiCo2pY,331536 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-BoldOblique.ttf,sha256=ppCBwVx2yCfgonpaf1x0thNchDSZlVSV_6jCDTqYKIs,253116 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-Oblique.ttf,sha256=KAUoE_enCfyJ9S0ZLcmV708P3Fw9e3OknWhJsZFtDNA,251472 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono.ttf,sha256=YC7Ia4lIz82VZIL-ZPlMNshndwFJ7y95HUYT9EO87LM,340240 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Bold.ttf,sha256=w3U_Lta8Zz8VhG3EWt2-s7nIcvMvsY_VOiHxvvHtdnY,355692 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif-BoldItalic.ttf,sha256=2T7-x6nS6CZ2jRou6VuVhw4V4pWZqE80hK8d4c7C4YE,347064 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Italic.ttf,sha256=PnmU-8VPoQzjNSpC1Uj63X2crbacsRCbydlg9trFfwQ,345612 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif.ttf,sha256=EHJElW6ZYrnpb6zNxVGCXgrgiYrhNzcTPhuSGi_TX_o,379740 +matplotlib/mpl-data/fonts/ttf/DejaVuSerifDisplay.ttf,sha256=KRTzLkfHd8J75Wd6-ufbTeefnkXeb8kJfZlJwjwU99U,14300 +matplotlib/mpl-data/fonts/ttf/LICENSE_DEJAVU,sha256=11k43sCY8G8Kw8AIUwZdlPAgvhw8Yu8dwpdboVtNmw4,4816 +matplotlib/mpl-data/fonts/ttf/LICENSE_STIX,sha256=urPTHf7wf0g2JPL2XycR52BluOcnMnixwHHt4QQcmVk,5476 +matplotlib/mpl-data/fonts/ttf/STIXGeneral.ttf,sha256=FnN4Ax4t3cYhbWeBnJJg6aBv_ExHjk4jy5im_USxg8I,448228 +matplotlib/mpl-data/fonts/ttf/STIXGeneralBol.ttf,sha256=6FM9xwg_o0a9oZM9YOpKg7Z9CUW86vGzVB-CtKDixqA,237360 +matplotlib/mpl-data/fonts/ttf/STIXGeneralBolIta.ttf,sha256=mHiP1LpI37sr0CbA4gokeosGxzcoeWKLemuw1bsJc2w,181152 +matplotlib/mpl-data/fonts/ttf/STIXGeneralItalic.ttf,sha256=bPyzM9IrfDxiO9_UAXTxTIXD1nMcphZsHtyAFA6uhSc,175040 +matplotlib/mpl-data/fonts/ttf/STIXNonUni.ttf,sha256=Ulb34CEzWsSFTRgPDovxmJZOwvyCAXYnbhaqvGU3u1c,59108 +matplotlib/mpl-data/fonts/ttf/STIXNonUniBol.ttf,sha256=XRBqW3jR_8MBdFU0ObhiV7-kXwiBIMs7QVClHcT5tgs,30512 +matplotlib/mpl-data/fonts/ttf/STIXNonUniBolIta.ttf,sha256=pb22DnbDf2yQqizotc3wBDqFGC_g27YcCGJivH9-Le8,41272 +matplotlib/mpl-data/fonts/ttf/STIXNonUniIta.ttf,sha256=BMr9pWiBv2YIZdq04X4c3CgL6NPLUPrl64aV1N4w9Ug,46752 +matplotlib/mpl-data/fonts/ttf/STIXSizFiveSymReg.ttf,sha256=wYuH1gYUpCuusqItRH5kf9p_s6mUD-9X3L5RvRtKSxs,13656 +matplotlib/mpl-data/fonts/ttf/STIXSizFourSymBol.ttf,sha256=yNdvjUoSmsZCULmD7SVq9HabndG9P4dPhboL1JpAf0s,12228 
+matplotlib/mpl-data/fonts/ttf/STIXSizFourSymReg.ttf,sha256=-9xVMYL4_1rcO8FiCKrCfR4PaSmKtA42ddLGqwtei1w,15972 +matplotlib/mpl-data/fonts/ttf/STIXSizOneSymBol.ttf,sha256=cYexyo8rZcdqMlpa9fNF5a2IoXLUTZuIvh0JD1Qp0i4,12556 +matplotlib/mpl-data/fonts/ttf/STIXSizOneSymReg.ttf,sha256=0lbHzpndzJmO8S42mlkhsz5NbvJLQCaH5Mcc7QZRDzc,19760 +matplotlib/mpl-data/fonts/ttf/STIXSizThreeSymBol.ttf,sha256=3eBc-VtYbhQU3BnxiypfO6eAzEu8BdDvtIJSFbkS2oY,12192 +matplotlib/mpl-data/fonts/ttf/STIXSizThreeSymReg.ttf,sha256=XFSKCptbESM8uxHtUFSAV2cybwxhSjd8dWVByq6f3w0,15836 +matplotlib/mpl-data/fonts/ttf/STIXSizTwoSymBol.ttf,sha256=MUCYHrA0ZqFiSE_PjIGlJZgMuv79aUgQqE7Dtu3kuo0,12116 +matplotlib/mpl-data/fonts/ttf/STIXSizTwoSymReg.ttf,sha256=_sdxDuEwBDtADpu9CyIXQxV7sIqA2TZVBCUiUjq5UCk,15704 +matplotlib/mpl-data/fonts/ttf/cmb10.ttf,sha256=B0SXtQxD6ldZcYFZH5iT04_BKofpUQT1ZX_CSB9hojo,25680 +matplotlib/mpl-data/fonts/ttf/cmex10.ttf,sha256=ryjwwXByOsd2pxv6WVrKCemNFa5cPVTOGa_VYZyWqQU,21092 +matplotlib/mpl-data/fonts/ttf/cmmi10.ttf,sha256=MJKWW4gR_WpnZXmWZIRRgfwd0TMLk3-RWAjEhdMWI00,32560 +matplotlib/mpl-data/fonts/ttf/cmr10.ttf,sha256=Tdl2GwWMAJ25shRfVe5mF9CTwnPdPWxbPkP_YRD6m_Y,26348 +matplotlib/mpl-data/fonts/ttf/cmss10.ttf,sha256=ffkag9BbLkcexjjLC0NaNgo8eSsJ_EKn2mfpHy55EVo,20376 +matplotlib/mpl-data/fonts/ttf/cmsy10.ttf,sha256=uyJu2TLz8QDNDlL15JEu5VO0G2nnv9uNOFTbDrZgUjI,29396 +matplotlib/mpl-data/fonts/ttf/cmtt10.ttf,sha256=YhHwmuk1mZka_alwwkZp2tGnfiU9kVYk-_IS9wLwcdc,28136 +matplotlib/mpl-data/images/back-symbolic.svg,sha256=Okj_ressZkfe6Ewv_o7GF5toc5qWCeFkQ2cHQ25BdVE,1532 +matplotlib/mpl-data/images/back.pdf,sha256=ZR7CJo_dAeCM-KlaGvskgtHQyRtrPIolc8REOmcoqJk,1623 +matplotlib/mpl-data/images/back.png,sha256=E4dGf4Gnz1xJ1v2tMygHV0YNQgShreDeVApaMb-74mU,380 +matplotlib/mpl-data/images/back.svg,sha256=Okj_ressZkfe6Ewv_o7GF5toc5qWCeFkQ2cHQ25BdVE,1532 +matplotlib/mpl-data/images/back_large.png,sha256=9A6hUSQeszhYONE4ZuH3kvOItM0JfDVu6tkfromCbsQ,620 +matplotlib/mpl-data/images/filesave-symbolic.svg,sha256=dMGXvLSOHPu44kiWgZx-B_My_tLWaP6J6GgxJfL4FW0,2049 +matplotlib/mpl-data/images/filesave.pdf,sha256=P1EPPV2g50WTt8UaX-6kFoTZM1xVqo6S2H6FJ6Zd1ec,1734 +matplotlib/mpl-data/images/filesave.png,sha256=b7ctucrM_F2mG-DycTedG_a_y4pHkx3F-zM7l18GLhk,458 +matplotlib/mpl-data/images/filesave.svg,sha256=dMGXvLSOHPu44kiWgZx-B_My_tLWaP6J6GgxJfL4FW0,2049 +matplotlib/mpl-data/images/filesave_large.png,sha256=LNbRD5KZ3Kf7nbp-stx_a1_6XfGBSWUfDdpgmnzoRvk,720 +matplotlib/mpl-data/images/forward-symbolic.svg,sha256=kOiKq3a4mieMRLVCwQBdOMTRrWG2NOX_5-rbAFHpdmQ,1551 +matplotlib/mpl-data/images/forward.pdf,sha256=KIqIL4YId43LkcOxV_TT5uvz1SP8k5iUNUeJmAElMV8,1630 +matplotlib/mpl-data/images/forward.png,sha256=pKbLepgGiGeyY2TCBl8svjvm7Z4CS3iysFxcq4GR-wk,357 +matplotlib/mpl-data/images/forward.svg,sha256=kOiKq3a4mieMRLVCwQBdOMTRrWG2NOX_5-rbAFHpdmQ,1551 +matplotlib/mpl-data/images/forward_large.png,sha256=36h7m7DZDHql6kkdpNPckyi2LKCe_xhhyavWARz_2kQ,593 +matplotlib/mpl-data/images/hand.pdf,sha256=hspwkNY915KPD7AMWnVQs7LFPOtlcj0VUiLu76dMabQ,4172 +matplotlib/mpl-data/images/hand.png,sha256=2cchRETGKa0hYNKUxnJABwkyYXEBPqJy_VqSPlT0W2Q,979 +matplotlib/mpl-data/images/hand.svg,sha256=hxxBtakaVFA7mpZOGakvo0QUcb2x06rojeS5gnVmyuc,4906 +matplotlib/mpl-data/images/help-symbolic.svg,sha256=XVcFcuzcL3SQ3LjfSbtdLYDjoB5YUkj2jk2Gk8vaZF8,1890 +matplotlib/mpl-data/images/help.pdf,sha256=CeE978IMi0YWznWKjIT1R8IrP4KhZ0S7usPUvreSgcA,1813 +matplotlib/mpl-data/images/help.png,sha256=s4pQrqaQ0py8I7vc9hv3BI3DO_tky-7YBMpaHuBDCBY,472 
+matplotlib/mpl-data/images/help.svg,sha256=XVcFcuzcL3SQ3LjfSbtdLYDjoB5YUkj2jk2Gk8vaZF8,1890 +matplotlib/mpl-data/images/help_large.png,sha256=1IwEyWfGRgnoCWM-r9CJHEogTJVD5n1c8LXTK4AJ4RE,747 +matplotlib/mpl-data/images/home-symbolic.svg,sha256=ptrus8h5PZTi9ahYfnaz-uZ8MAHCr72aPeMW48TBR9Q,1911 +matplotlib/mpl-data/images/home.pdf,sha256=e0e0pI-XRtPmvUCW2VTKL1DeYu1pvPmUUeRSgEbWmik,1737 +matplotlib/mpl-data/images/home.png,sha256=IcFdAAUa6_A0qt8IO3I8p4rpXpQgAlJ8ndBECCh7C1w,468 +matplotlib/mpl-data/images/home.svg,sha256=ptrus8h5PZTi9ahYfnaz-uZ8MAHCr72aPeMW48TBR9Q,1911 +matplotlib/mpl-data/images/home_large.png,sha256=uxS2O3tWOHh1iau7CaVV4ermIJaZ007ibm5Z3i8kXYg,790 +matplotlib/mpl-data/images/matplotlib.pdf,sha256=BkSUf-2xoij-eXfpV2t7y1JFKG1zD1gtV6aAg3Xi_wE,22852 +matplotlib/mpl-data/images/matplotlib.png,sha256=w8KLRYVa-voUZXa41hgJauQuoois23f3NFfdc72pUYY,1283 +matplotlib/mpl-data/images/matplotlib.svg,sha256=QiTIcqlQwGaVPtHsEk-vtmJk1wxwZSvijhqBe_b9VCI,62087 +matplotlib/mpl-data/images/matplotlib_large.png,sha256=ElRoue9grUqkZXJngk-nvh4GKfpvJ4gE69WryjCbX5U,3088 +matplotlib/mpl-data/images/move-symbolic.svg,sha256=_uamLnjQ20iwSuKbd8JvTXUFaRq4206MrpFWvtErr8I,2529 +matplotlib/mpl-data/images/move.pdf,sha256=CXk3PGK9WL5t-5J-G2X5Tl-nb6lcErTBS5oUj2St6aU,1867 +matplotlib/mpl-data/images/move.png,sha256=TmjR41IzSzxGbhiUcV64X0zx2BjrxbWH3cSKvnG2vzc,481 +matplotlib/mpl-data/images/move.svg,sha256=_uamLnjQ20iwSuKbd8JvTXUFaRq4206MrpFWvtErr8I,2529 +matplotlib/mpl-data/images/move_large.png,sha256=Skjz2nW_RTA5s_0g88gdq2hrVbm6DOcfYW4Fu42Fn9U,767 +matplotlib/mpl-data/images/qt4_editor_options.pdf,sha256=2qu6GVyBrJvVHxychQoJUiXPYxBylbH2j90QnytXs_w,1568 +matplotlib/mpl-data/images/qt4_editor_options.png,sha256=EryQjQ5hh2dwmIxtzCFiMN1U6Tnd11p1CDfgH5ZHjNM,380 +matplotlib/mpl-data/images/qt4_editor_options.svg,sha256=sdrNIxYT-BLvJ30ASnaRQ5PxF3SB41-pgdaIJT0KqBg,1264 +matplotlib/mpl-data/images/qt4_editor_options_large.png,sha256=-Pd-9Vh5aIr3PZa8O6Ge_BLo41kiEnpmkdDj8a11JkY,619 +matplotlib/mpl-data/images/subplots-symbolic.svg,sha256=Gq4fDSS99Rv5rbR8_nenV6jcY5VsKPARWeH-BZBk9CU,2150 +matplotlib/mpl-data/images/subplots.pdf,sha256=Q0syPMI5EvtgM-CE-YXKOkL9eFUAZnj_X2Ihoj6R4p4,1714 +matplotlib/mpl-data/images/subplots.png,sha256=MUfCItq3_yzb9yRieGOglpn0Y74h8IA7m5i70B63iRc,445 +matplotlib/mpl-data/images/subplots.svg,sha256=Gq4fDSS99Rv5rbR8_nenV6jcY5VsKPARWeH-BZBk9CU,2150 +matplotlib/mpl-data/images/subplots_large.png,sha256=Edu9SwVMQEXJZ5ogU5cyW7VLcwXJdhdf-EtxxmxdkIs,662 +matplotlib/mpl-data/images/zoom_to_rect-symbolic.svg,sha256=uMmdGkO43ZHlezkpieR3_MiqlEc5vROffRDOhY4sxm4,1499 +matplotlib/mpl-data/images/zoom_to_rect.pdf,sha256=SEvPc24gfZRpl-dHv7nx8KkxPyU66Kq4zgQTvGFm9KA,1609 +matplotlib/mpl-data/images/zoom_to_rect.png,sha256=aNz3QZBrIgxu9E-fFfaQweCVNitGuDUFoC27e5NU2L4,530 +matplotlib/mpl-data/images/zoom_to_rect.svg,sha256=uMmdGkO43ZHlezkpieR3_MiqlEc5vROffRDOhY4sxm4,1499 +matplotlib/mpl-data/images/zoom_to_rect_large.png,sha256=V6pkxmm6VwFExdg_PEJWdK37HB7k3cE_corLa7RbUMk,1016 +matplotlib/mpl-data/kpsewhich.lua,sha256=RdyYaBnBLy3NsB5c2R5FGrKu-V-WBcZim24NWilsTfw,139 +matplotlib/mpl-data/matplotlibrc,sha256=CXIWz2WeGv3kyoQsMBDOO0m3yF9M_pK1EihXl01w0bw,43413 +matplotlib/mpl-data/plot_directive/plot_directive.css,sha256=utSJ1oETz0UG6AC9hU134J_JY78ENijqMZXN0JMBUfk,318 +matplotlib/mpl-data/sample_data/Minduka_Present_Blue_Pack.png,sha256=XnKGiCanpDKalQ5anvo5NZSAeDP7fyflzQAaivuc0IE,13634 +matplotlib/mpl-data/sample_data/README.txt,sha256=ABz19VBKfGewdY39QInG9Qccgn1MTYV3bT5Ph7TCy2Y,128 
+matplotlib/mpl-data/sample_data/Stocks.csv,sha256=72878aZNXGxd5wLvFUw_rnj-nfg4gqtrucZji-w830c,67924 +matplotlib/mpl-data/sample_data/axes_grid/bivariate_normal.npy,sha256=DpWZ9udAh6ospYqneEa27D6EkRgORFwHosacZXVu98U,1880 +matplotlib/mpl-data/sample_data/data_x_x2_x3.csv,sha256=A0SU3buOUGhT-NI_6LQ6p70fFSIU3iLFdgzvzrKR6SE,132 +matplotlib/mpl-data/sample_data/eeg.dat,sha256=KGVjFt8ABKz7p6XZirNfcxSTOpGGNuyA8JYErRKLRBc,25600 +matplotlib/mpl-data/sample_data/embedding_in_wx3.xrc,sha256=cUqVw5vDHNSZoaO4J0ebZUf5SrJP36775abs7R9Bclg,2186 +matplotlib/mpl-data/sample_data/goog.npz,sha256=QAkXzzDmtmT3sNqT18dFhg06qQCNqLfxYNLdEuajGLE,22845 +matplotlib/mpl-data/sample_data/grace_hopper.jpg,sha256=qMptc0dlcDsJcoq0f-WfRz2Trjln_CTHwCiMPHrbcTA,61306 +matplotlib/mpl-data/sample_data/jacksboro_fault_dem.npz,sha256=1JP1CjPoKkQgSUxU0fyhU50Xe9wnqxkLxf5ukvYvtjc,174061 +matplotlib/mpl-data/sample_data/logo2.png,sha256=DXNx4FXeyqxHy26AmvNELpwezQLxweLQY9HP7ktKIdc,22279 +matplotlib/mpl-data/sample_data/membrane.dat,sha256=q3lbQpIBpbtXXGNw1eFwkN_PwxdDGqk4L46IE2b0M1c,48000 +matplotlib/mpl-data/sample_data/msft.csv,sha256=GArKb0O3DgKZRsKdJf6lX3rMSf-PCekIiBoLNdgF7Mk,3211 +matplotlib/mpl-data/sample_data/s1045.ima.gz,sha256=MrQk1k9it-ccsk0p_VOTitVmTWCAVaZ6srKvQ2n4uJ4,33229 +matplotlib/mpl-data/sample_data/topobathy.npz,sha256=AkTgMpFwLfRQJNy1ysvE89TLMNct-n_TccSsYcQrT78,45224 +matplotlib/mpl-data/stylelib/Solarize_Light2.mplstyle,sha256=aytOm4eT_SPvs7HC28ZY4GukeN44q-SE0JEMCR8kVOk,1257 +matplotlib/mpl-data/stylelib/_classic_test_patch.mplstyle,sha256=iopHpMaM3im_AK2aiHGuM2DKM5i9Kc84v6NQEoSb10Q,167 +matplotlib/mpl-data/stylelib/_mpl-gallery-nogrid.mplstyle,sha256=1VOL3USqD6iuGQaSynNg1QhyUwvKLnkLyUKdbBMnnqg,489 +matplotlib/mpl-data/stylelib/_mpl-gallery.mplstyle,sha256=MN-q59CiDqHXB8xFKXxzCbJJbJmNDhBe9lDJJAoMTPA,504 +matplotlib/mpl-data/stylelib/bmh.mplstyle,sha256=-KbhaI859BITHIoyUZIfpQDjfckgLAlDAS_ydKsm6mc,712 +matplotlib/mpl-data/stylelib/classic.mplstyle,sha256=1o5b47VD_RIZv3unnG9Gm2tbprTvOeNGXM8hJCmGuYI,24670 +matplotlib/mpl-data/stylelib/dark_background.mplstyle,sha256=GzSBD06jvlRYOqu7D5Z5a5x25l9JnTgtObn7S4D9zug,607 +matplotlib/mpl-data/stylelib/fast.mplstyle,sha256=yTa2YEIIP9xi5V_G0p2vSlxghuhNwjRi9gPECMxyRiM,288 +matplotlib/mpl-data/stylelib/fivethirtyeight.mplstyle,sha256=IcBt2DSDz9CtuyCis9JZtT3Nqoh6LVO-dB66AxObUcA,780 +matplotlib/mpl-data/stylelib/ggplot.mplstyle,sha256=u2oPHMLWFtZcpIjHk2swi2Nrt4NgnEtof5lxcwM0RD0,956 +matplotlib/mpl-data/stylelib/grayscale.mplstyle,sha256=KCLg-pXpns9cnKDXKN2WH6mV41OH-6cbT-5zKQotSdw,526 +matplotlib/mpl-data/stylelib/petroff10.mplstyle,sha256=Qj7pPbHh3L25kdsPt1ypwvKR3dPzuOrFzzQTP3Mfilg,298 +matplotlib/mpl-data/stylelib/seaborn-v0_8-bright.mplstyle,sha256=pDqn3-NUyVLvlfkYs8n8HzNZvmslVMChkeH-HtZuJIc,144 +matplotlib/mpl-data/stylelib/seaborn-v0_8-colorblind.mplstyle,sha256=eCSzFj5_2vR6n5qu1rHE46wvSVGZcdVqz85ov40ZsH8,148 +matplotlib/mpl-data/stylelib/seaborn-v0_8-dark-palette.mplstyle,sha256=p5ABKNQHRG7bk4HXqMQrRBjDlxGAo3RCXHdQmP7g-Ng,142 +matplotlib/mpl-data/stylelib/seaborn-v0_8-dark.mplstyle,sha256=I4xQ75vE5_9X4k0cNDiqhhnF3OcrZ2xlPX8Ll7OCkoE,667 +matplotlib/mpl-data/stylelib/seaborn-v0_8-darkgrid.mplstyle,sha256=2bXOSzS5gmPzRBrRmzVWyhg_7ZaBRQ6t_-O-cRuyZoA,670 +matplotlib/mpl-data/stylelib/seaborn-v0_8-deep.mplstyle,sha256=44dLcXjjRgR-6yaopgGRInaVgz3jk8VJVQTbBIcxRB0,142 +matplotlib/mpl-data/stylelib/seaborn-v0_8-muted.mplstyle,sha256=T4o3jvqKD_ImXDkp66XFOV_xrBVFUolJU34JDFk1Xkk,143 
+matplotlib/mpl-data/stylelib/seaborn-v0_8-notebook.mplstyle,sha256=PcvZQbYrDdducrNlavBPmQ1g2minio_9GkUUFRdgtoM,382 +matplotlib/mpl-data/stylelib/seaborn-v0_8-paper.mplstyle,sha256=n0mboUp2C4Usq2j6tNWcu4TZ_YT4-kKgrYO0t-rz1yw,393 +matplotlib/mpl-data/stylelib/seaborn-v0_8-pastel.mplstyle,sha256=8nV8qRpbUrnFZeyE6VcQ1oRuZPLil2W74M2U37DNMOE,144 +matplotlib/mpl-data/stylelib/seaborn-v0_8-poster.mplstyle,sha256=dUaKqTE4MRfUq2rWVXbbou7kzD7Z9PE9Ko8aXLza8JA,403 +matplotlib/mpl-data/stylelib/seaborn-v0_8-talk.mplstyle,sha256=7FnBaBEdWBbncTm6_ER-EQVa_bZgU7dncgez-ez8R74,403 +matplotlib/mpl-data/stylelib/seaborn-v0_8-ticks.mplstyle,sha256=CITZmZFUFp40MK2Oz8tI8a7WRoCizQU9Z4J172YWfWw,665 +matplotlib/mpl-data/stylelib/seaborn-v0_8-white.mplstyle,sha256=WjJ6LEU6rlCwUugToawciAbKP9oERFHr9rfFlUrdTx0,665 +matplotlib/mpl-data/stylelib/seaborn-v0_8-whitegrid.mplstyle,sha256=ec4BjsNzmOvHptcJ3mdPxULF3S1_U1EUocuqfIpw-Nk,664 +matplotlib/mpl-data/stylelib/seaborn-v0_8.mplstyle,sha256=_Xu6qXKzi4b3GymCOB1b1-ykKTQ8xhDliZ8ezHGTiAs,1130 +matplotlib/mpl-data/stylelib/tableau-colorblind10.mplstyle,sha256=BsirZVd1LmPWT4tBIz6loZPjZcInoQrIGfC7rvzqmJw,190 +matplotlib/offsetbox.py,sha256=WGa1TdIGkg1LPPL6ZJ8dbE8wfumJMz863sjsafPm5Vg,54488 +matplotlib/offsetbox.pyi,sha256=ISf7gRrOhvefNKfutbHIPh6QYS0kAgtA97bYa6N-82o,9909 +matplotlib/patches.py,sha256=kI0XgcYLusGwILt147Hmoh4NhoR6unlZdafgQrZxZJY,165054 +matplotlib/patches.pyi,sha256=d-sCqVXw_ANsvoUpliH7dllMbTy7TcyRm2IV97Ll0nM,22649 +matplotlib/path.py,sha256=CD1LeGQMm9p0U5mSRIG1L2WCiIie3rYhykLwud1qLA8,42717 +matplotlib/path.pyi,sha256=N1XOCWSUG4RtCcMo02yvvq3HDYo_369oKQkEJUHN80c,4777 +matplotlib/patheffects.py,sha256=4paQgPGKc_xdBz_BZ_MaVhKJ2FxZDGeh5zHwzn3evDY,18387 +matplotlib/patheffects.pyi,sha256=7-FhuxGGrer94GtJ1sZ0YxOmK6Nv4oixTmsKb6-ijOg,3664 +matplotlib/projections/__init__.py,sha256=gICKgNfjJ-tioWEFa75BKKJZG6JJjx46_RpEqt5fk94,4438 +matplotlib/projections/__init__.pyi,sha256=D28dSYmwZcSBFBtNDer-QqE_lqXAhHKersuAlvi89jE,673 +matplotlib/projections/geo.py,sha256=feR2dybCylhk6xe-MzxBr_e8MdbpVc8tBtCQQJ0V2co,17605 +matplotlib/projections/geo.pyi,sha256=vPfhvj7_e0ZnKjyfDUNC89RGCktycJBPnn5D8w0A7N8,3775 +matplotlib/projections/polar.py,sha256=f7-1dEIXWkh2_NOoVf8pcVPe0PqfkrKL0V1-ykElM_s,56939 +matplotlib/projections/polar.pyi,sha256=IWNSLQIo5cXFBJur5VFVDQBwAtq5n1XFZMnrj85mCBg,6636 +matplotlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/pylab.py,sha256=VqUqd2J2-dKtltZtsYP8ePKoX1jsNDpTyqlcmpfC-lo,2369 +matplotlib/pyplot.py,sha256=Dbp8GKxM6B5oCmcjb8lsik2R8QPtPt3JgmgaqeWqkPw,149704 +matplotlib/quiver.py,sha256=L8O6Ke6oH60R9VA_gd9JiA5Fvv5sMPIFyC_uNRrZSo0,48675 +matplotlib/quiver.pyi,sha256=Kq1FvQP-DRX1N9Xlp6XF6OJfKkJ7tc4dYq4CEndxEzg,5640 +matplotlib/rcsetup.py,sha256=87KrQeDFg4sRu4obxF4mc0eGSCuClGyuzXsxoZ_kme4,51606 +matplotlib/rcsetup.pyi,sha256=0VTIhzfKgBcKxOeOdoKq1oa7ZubalOj9Ks0efE0-cms,4337 +matplotlib/sankey.py,sha256=eKA9DrX96-bpoWRdeNnvC73aJfPtCnB78WmMPCXQ9rc,36151 +matplotlib/sankey.pyi,sha256=P2AGkhdg3ZLqXaQdf8yodblrBpdgEQZBLKPL1mbxmFc,1451 +matplotlib/scale.py,sha256=n3JqLU_d3dAKrP1uO5lTipVEaxgiLhDR2sSk8TAXE_M,26924 +matplotlib/scale.pyi,sha256=-ptRptcqiAuzfKwrjgSWWOxFmjRUTOKGwIoWtuBXKgY,5057 +matplotlib/sphinxext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/sphinxext/figmpl_directive.py,sha256=rgoJcGRfLdp4jiOlyBIwG83wcyfzbFVfrlrSCmDHL9g,9308 +matplotlib/sphinxext/mathmpl.py,sha256=CKIocBSV4prRMPR50NL6WxBTULBa1zReoqo7ZtbvCsc,7871 
+matplotlib/sphinxext/plot_directive.py,sha256=6GEOX2y0K1Izc98Aojt8pCXS21Ucru6E-T_33lRU92s,32542 +matplotlib/sphinxext/roles.py,sha256=U7P4t06foLF2PsZJrTN4mfaspKGRoYwFZ3ijIhIUVyo,4882 +matplotlib/spines.py,sha256=rKAoBueuqmSids1SKFgWWgMtvUxo2HOhyw73Ri4HyOc,21643 +matplotlib/spines.pyi,sha256=eceCS47bOawgkWdLUMIPt6XX_H-EtWS2t-euz5Bgkbg,2951 +matplotlib/stackplot.py,sha256=a6IUAujDjGpaFqiTzGr85eDD8l6baWLPrM5YNvb6g9I,4997 +matplotlib/stackplot.pyi,sha256=BX2g3-6dJ4RhjjIguo-gwWhvKZNYqFCQYVw5SKvpM4c,561 +matplotlib/streamplot.py,sha256=rXVKYEi4MfSg3HnAaWb8w4qFp95mRf6dosMVqtXZEpE,24011 +matplotlib/streamplot.pyi,sha256=fkdkew8u807qd7WXFd_ZjF7FUbx8okOI8A7u4x_ktvg,2690 +matplotlib/style/__init__.py,sha256=fraQtyBC3TY2ZTsLdxL7zNs9tJYZtje7tbiqEf3M56M,140 +matplotlib/style/core.py,sha256=RSNH2NO0xK_Y37dbpMWyaWMMjvcMa_NHaKXDa5I4_tA,8362 +matplotlib/style/core.pyi,sha256=mIlyz6eChdMjZ_A7S05iJgcWwrWE2NXg2rhm1C1eYlQ,521 +matplotlib/table.py,sha256=MkF9s8u2X-9_Cei9sJixljb2_I_B_4UOqVx0iReKIU0,27744 +matplotlib/table.pyi,sha256=tcR40hoCWCRLlL-8PtFMa7P5sq8qajDSMFxd25Q90z4,3098 +matplotlib/testing/__init__.py,sha256=pTaqH1qgGNBeWCmvK_vjqFJXJ9dvZXCaKQ-8jHSpaEc,6942 +matplotlib/testing/__init__.pyi,sha256=ffqfetWzyCVrSx7BlnoxCmbgIaZg2x57nDrq9eucRk0,1752 +matplotlib/testing/_markers.py,sha256=0iNyOi25XLv_gTfSUqiRizdSqJzozePPBMRo72H2Je4,1419 +matplotlib/testing/compare.py,sha256=8dVcuP7FauHY1JZQXBD7fNBfQKb4IoXM6pVeq6zJgBs,19780 +matplotlib/testing/compare.pyi,sha256=xlJ4chgXKe567NUavlu-dalyPr4wAQUbd2Fz6aK_JII,1192 +matplotlib/testing/conftest.py,sha256=CcwfUtIkUOgGBlXXNplQE6dg2U_UBKdH6AajROPJ5uw,6683 +matplotlib/testing/conftest.pyi,sha256=zOF_MM2GjCkNi9nZ8vldV9VI41bD67nVhrIgLVuMNsk,416 +matplotlib/testing/decorators.py,sha256=1Xiyb2exeN0nDEO4he8MW_VKwFE34cB7EtRl6bcGRzg,18022 +matplotlib/testing/decorators.pyi,sha256=0fSpdLBtEH7ZP_trVJ7RPxNtOX9sJ_z-MkNsbUxF8nM,872 +matplotlib/testing/exceptions.py,sha256=72QmjiHG7DwxSvlJf8mei-hRit5AH3NKh0-osBo4YbY,138 +matplotlib/testing/jpl_units/Duration.py,sha256=9FMBu9uj6orCWtf23cf6_9HCFUC50xAHrCzaxATwQfM,3966 +matplotlib/testing/jpl_units/Epoch.py,sha256=-FGxeq-VvCS9GVPwOEE5ind_G4Tl9ztD-gYcW9CWzjo,6100 +matplotlib/testing/jpl_units/EpochConverter.py,sha256=fhWjyP567bzcTU_oNuJJpucoolqS88Nt-yEFg1-3yEk,2944 +matplotlib/testing/jpl_units/StrConverter.py,sha256=codGw9b_Zc-MG_YK4CiyMrnMR8ahR9hw836O2SsV8QI,2865 +matplotlib/testing/jpl_units/UnitDbl.py,sha256=EABjyEK4MVouyvlwi_9KdYDg-qbYY3aLHoUjRw37Fb0,5882 +matplotlib/testing/jpl_units/UnitDblConverter.py,sha256=B8DssrQVyC4mwvSFP78cGL0vCnZgVhDaAbZE-jsXLUg,2828 +matplotlib/testing/jpl_units/UnitDblFormatter.py,sha256=246hgA4_pCfJm-P94hEsxqnTS9t0XlvLC8p1v_bw2pU,657 +matplotlib/testing/jpl_units/__init__.py,sha256=p__9RUwrt2LJ2eoT2JPM-42XLxSJrfA4az3rN5uP6d4,2684 +matplotlib/testing/widgets.py,sha256=IaoPHgDguJSHd7t97kSVWUZvqTQ-Tsr9tU9kfnmJrAM,3480 +matplotlib/testing/widgets.pyi,sha256=Ioau7Q2aPRDZLx8hze2DOe3E1vn7QPxePC74WMR7tFc,831 +matplotlib/tests/__init__.py,sha256=XyXveEAxafB87gnbx0jkC0MggzKO8FvORq_6RtJRwo4,366 +matplotlib/tests/conftest.py,sha256=0cKxWVUC25UfQJQcAjFJa73MQE9eSYqKSXGDx7kAq48,138 +matplotlib/tests/test_afm.py,sha256=A7jm2o-QaQH9SSpiFxGtZkbVU0LJZE69jfPv7RczOD4,3701 +matplotlib/tests/test_agg.py,sha256=xAKrTzvT1dNM_49efiWisnGNrJtINOBenYSOtaRXvIg,10884 +matplotlib/tests/test_agg_filter.py,sha256=3c_Smtb4OHEOfdMFCOb2qKhzMXSbNoaUtsJ0pW47Q44,1067 +matplotlib/tests/test_animation.py,sha256=z0FJ0gx15rJB_khEPSg1E-vLGQyTj85Ki-uTjFsMEPE,18357 
+matplotlib/tests/test_api.py,sha256=UJU_qoi7Y8Rbz1S89NYKvOkNwv_RBVPNFajrcXixNds,5736 +matplotlib/tests/test_arrow_patches.py,sha256=qgbS9OrlHa4TJihAd2ekW4AupCVgKDd2B5YyF704Pak,6577 +matplotlib/tests/test_artist.py,sha256=oblCTkCPZzufghCp19AKwIJftV69tZwEOjeRLn8soAQ,18599 +matplotlib/tests/test_axes.py,sha256=NyVdODWyq-ODna0ElxJW_sTDKw2iDx8_BD_cC7juIyU,328610 +matplotlib/tests/test_axis.py,sha256=Ljc_Yc7dAFkTuVW0PD0i3vEnjkGOuAehkpcNpWg9A7Y,1446 +matplotlib/tests/test_backend_bases.py,sha256=FwUgearYkC7UgqpRpaKk51o5F62ipDL4kUvFROwGVLo,22775 +matplotlib/tests/test_backend_cairo.py,sha256=O2LTYjsfPn__bKtTz4MGGBodpSshoPkzu0INsc18xmI,1821 +matplotlib/tests/test_backend_gtk3.py,sha256=Zb29s85nbRzwLVmChCA5_LphAeC_yKyP45kCcHxfgPE,2849 +matplotlib/tests/test_backend_inline.py,sha256=b7GrmMCrJfI5kXGJKbxdMM9kWBd-JKUdCkRq-vk8JXo,1608 +matplotlib/tests/test_backend_macosx.py,sha256=s4Kd1fnwYCuamGRLtkOAYtzer7jVOjLaA2LH52hhPqM,2233 +matplotlib/tests/test_backend_nbagg.py,sha256=o6ON7tt_LOW7wzg4StIMYuTgMFSPNWt1FOv2FFMIefk,1459 +matplotlib/tests/test_backend_pdf.py,sha256=CxVYTlz8_AQ7o90yh5cavpSnNBLozIL8UhscRYShUVM,14602 +matplotlib/tests/test_backend_pgf.py,sha256=WlGdZThz5uQ-x8-HqpMcB5IWmYijflNCC-rVNgvz0yw,13028 +matplotlib/tests/test_backend_ps.py,sha256=veHbm4_dpBDcczgomEjQxdz-Sv8wHij3LVSKWST9Bd0,12600 +matplotlib/tests/test_backend_qt.py,sha256=VTWiuGw-evGdc2OzaKRFU0MXvtwxInvZNXBr51y_Jg4,13000 +matplotlib/tests/test_backend_registry.py,sha256=SiyyRRMT1tIXKxwn7IXq0SaIrYt3DEvBBe8TvNxa644,5950 +matplotlib/tests/test_backend_svg.py,sha256=L73hpBa1P-y978Y3Gpd_eSJAFJMvUvrOqiYaJ-CkQUw,22930 +matplotlib/tests/test_backend_template.py,sha256=uuE7oZ9pSBVDWrPL05B0WgCFsgv6HlXyetuPTfJn6a8,2184 +matplotlib/tests/test_backend_tk.py,sha256=A1h3AfPhEMbG5PLdDr1gbnwt_DlaQOBgFux5QsIpCpM,8850 +matplotlib/tests/test_backend_tools.py,sha256=C-B7NCkyWsQ5KzQEnI5Be16DsAHHZJU9P5v9--wsF-o,501 +matplotlib/tests/test_backend_webagg.py,sha256=4Oc-z7w-mTGETB1x0FQ_gZP9qHfyWh5lwWc9qPkkisc,938 +matplotlib/tests/test_backends_interactive.py,sha256=uzbU_sKZVaKmKDFW0FgGRauPvVnmsFRlXQA5XB_UvKo,29020 +matplotlib/tests/test_basic.py,sha256=ubAnlE-lFQzMhoBlWYZJKCAsox9Y3jBXS_IMn29Zi84,1141 +matplotlib/tests/test_bbox_tight.py,sha256=5yo6yy3HTUIf2-oGZllieaLynmOupzWcuPpeGmgS5BQ,6314 +matplotlib/tests/test_bezier.py,sha256=IrrDWAV5O6ELMdziaaOeBI7LfyEaTBVsT4B1T9KMzD8,692 +matplotlib/tests/test_category.py,sha256=jWXOAGOR_gpvrjarvJB6Tyot0yOIWaO6d1F9mrNcVPo,12043 +matplotlib/tests/test_cbook.py,sha256=85PW_AjpGEqr6SfL8CjTY-kZ7TA0RMdrz8Gi3Dyc91g,33530 +matplotlib/tests/test_collections.py,sha256=3hg_WB-zv409PgIchCM7df5gVOiwYXj0H2n0ZFjrlmU,48406 +matplotlib/tests/test_colorbar.py,sha256=tDdmWz_LxCbD80ksM6iqrHYqpombyL8QtqpKQSKOS14,46711 +matplotlib/tests/test_colors.py,sha256=-Q5VZfQWI-NSMOa4tusxDgWe705a7uymMHLI8h10C8A,61215 +matplotlib/tests/test_compare_images.py,sha256=NcBoT8HAxtzoR1zZBu0V3MFpGWtdFaDblQ8o3OTO6zM,3260 +matplotlib/tests/test_constrainedlayout.py,sha256=WWQM9nDpVI8OLAQ_P88PqO2-hMiB7pBGAp0DnZBPJmM,23602 +matplotlib/tests/test_container.py,sha256=FTewckOd3dJqLOzEUa29Itjqusk7Mx7MK5xPVMhzMmc,694 +matplotlib/tests/test_contour.py,sha256=O_VIhjDgsb0A-brYR-gJ-WUcqzYzH0XLQD4aI_5SocM,30289 +matplotlib/tests/test_cycles.py,sha256=3KyRmWH29WUgvIXUT06tKVDNCfDWqxuxlLueIp-FIl0,5996 +matplotlib/tests/test_dates.py,sha256=jOK-KIAMBRCcnlgpzknpLJb3JvO3xFa5Qe3O5mVTeGM,56533 +matplotlib/tests/test_datetime.py,sha256=n2TYVItNQcUFBLUivuTXBnc0fElIeSmy9hu4ZNMVurc,32635 
+matplotlib/tests/test_determinism.py,sha256=harG-hnOpD5HZFKPtHebT0snHu8V6C6BScVFml1Mqyg,7958 +matplotlib/tests/test_doc.py,sha256=K6HhdRcHRUNNC0iIhxBLCq5tOqxqHBSvanHxXg5w5iI,1015 +matplotlib/tests/test_dviread.py,sha256=JeTuA2FMUj1FddxDVBXUtnvZYTgztE-CyRXL_mI20P0,2764 +matplotlib/tests/test_figure.py,sha256=bGyAW9fwJc3gVVul_Vd5f8TEmbrEoDAMWV4Rm5-QXQQ,60289 +matplotlib/tests/test_font_manager.py,sha256=Qkewu4tMylgVciamVwWL8TFoDon8YyzN-9po6usN6Mk,14136 +matplotlib/tests/test_fontconfig_pattern.py,sha256=LSR6lWF_0cVOshkGiflYaTCLcISRIJM6mjXm5QtDjX4,2168 +matplotlib/tests/test_ft2font.py,sha256=lz13lRGmxdQ9eRjQlnigdfQWBwaST1A3_AgEAR5dJA0,40838 +matplotlib/tests/test_getattr.py,sha256=Tl_H1zpwLdSIVutc4vi-QwDCeWPzBGpN31N9ItzTkeQ,1090 +matplotlib/tests/test_gridspec.py,sha256=SYJuJucA_PyQ2Rsov1RaNhadOEWGDcMbQcVFrWIoy3I,1580 +matplotlib/tests/test_image.py,sha256=O9pUxNrGyewPievlC-WDhJ3hkCTsiAq4id6E6hJ22Is,59562 +matplotlib/tests/test_legend.py,sha256=MQL_blp2W5CUXFRGz8kFZzcVZ8ipnwkWr43-W-3B0HI,54829 +matplotlib/tests/test_lines.py,sha256=KElNi2GlJCtGLhNpf8rl8bkDhMcQ9ittyRdsX4rdSxo,15035 +matplotlib/tests/test_marker.py,sha256=w0WVHoaD-6iybjUSENoVFFdUOOR13943JcE4sgz3qhI,11410 +matplotlib/tests/test_mathtext.py,sha256=2S1i3BLNffEqz1t-mehhWBROBcsdBZsdgrticxW6Xrw,24549 +matplotlib/tests/test_matplotlib.py,sha256=YPBZQdO6BEC4mPKxXD_W-HoFzoOZwGGifYrYdCDcy48,2887 +matplotlib/tests/test_mlab.py,sha256=d4qMyogTFMrvlRZEpDs7SjhSmmCnBUMNgSX2bJU6eDk,42269 +matplotlib/tests/test_multivariate_colormaps.py,sha256=wGc08cdUTBDJvhUiwVqTrBS7o19Ei-wQRnWlkny7h5M,20785 +matplotlib/tests/test_offsetbox.py,sha256=m6rezy7tb5YUgiITMIpOHdDV56DBx-Cy32XFa_WTcd4,16137 +matplotlib/tests/test_patches.py,sha256=4pWXTevmwDPMn2Jw_qyynXUyBvGoTV_xo5P16PSLQLU,33466 +matplotlib/tests/test_path.py,sha256=d5xXaTscikS4R8JKMVDcwgmXIhFVQaPFY-kYCAJxddY,22158 +matplotlib/tests/test_patheffects.py,sha256=q7JA5VbZU5Zsqvc2eGM7YhB9TClkNp3akQCZVsHrNQU,8109 +matplotlib/tests/test_pickle.py,sha256=2VcsJ6QwN9vP4cvkuUCZD7uwKyd1rOCDIaYPEBr3u6E,10014 +matplotlib/tests/test_png.py,sha256=d6u6UkU71T6ULxDVSdMkIT3CUAL46dEuDuNrnjZe5BE,1407 +matplotlib/tests/test_polar.py,sha256=_h3VVisv2bMpaaRFhGEzbJe9jnzzEtKM_EvaHnw6_xA,17607 +matplotlib/tests/test_preprocess_data.py,sha256=cIVICUi1iahMQS30sqI5IlT2RYJRH2gJ0z60FyuYUFk,11363 +matplotlib/tests/test_pyplot.py,sha256=5-Rv0g037ifl64vWybmlV8y22YjuY5lVX5aPvnzihiA,13910 +matplotlib/tests/test_quiver.py,sha256=5zSF_0DpoDXPBvGWD6iu1VfIUna15npLd2UyCG6_Ksg,11953 +matplotlib/tests/test_rcparams.py,sha256=PLos0asOBQMn0hjrVP_itywcbZjT8og_TaB3tqSFHac,26506 +matplotlib/tests/test_sankey.py,sha256=yg4i-qHT5MljFzFGViOnHbMkumM8bhPwRgoQ5M6CUEs,3900 +matplotlib/tests/test_scale.py,sha256=fqLu88lVy7-JZFNslpgiid11_TIivSkM9DxVnHwZJYo,8429 +matplotlib/tests/test_simplification.py,sha256=IJ1yEz2Y58QtKRRHz41NaXVS4kkRipaE_Un0HgM4K2g,21570 +matplotlib/tests/test_skew.py,sha256=_Mjwfgce6WxLH3dafI7irv2Cw0ANifytCtQXdyOStCk,6349 +matplotlib/tests/test_sphinxext.py,sha256=9hzVcUgMhtfo_6A_c-SvCYWDQY45Oq-lD6Os4PnRg3s,9937 +matplotlib/tests/test_spines.py,sha256=O7jKGFJ1hv50k94i1fjPYNsKljDaWDHi9R5fo2bcXI0,4909 +matplotlib/tests/test_streamplot.py,sha256=mSLuZ7E2eEtcI5nKHBZZ_30OYte5awqsXWFWROJYtCQ,5731 +matplotlib/tests/test_style.py,sha256=sd6rMLpPBS1hinR2svI1rXZmnXUnMQk1szxSNBUXtzU,6509 +matplotlib/tests/test_subplots.py,sha256=7weokAQ2dmYwGAXAwqN-tfjZDrNfHxPmGgTlzI6TBAU,10771 +matplotlib/tests/test_table.py,sha256=QsrDRpe864LP3zkQBtTciKub5raYz3i3Vrt_Ve3kdzQ,8659 
+matplotlib/tests/test_testing.py,sha256=eh-1r4PIXcM7hfSKCNTU899QxRYWhZBg6W7x0TDVUxo,1057 +matplotlib/tests/test_texmanager.py,sha256=j_5ZG02ztIO-m0oXzd31a13YFw_G7JR7Ldt3fwf-phI,2647 +matplotlib/tests/test_text.py,sha256=3RxKrfJJBWKFGSrEvxeCG5rxcvlkIivir8wCj1ls5-M,38473 +matplotlib/tests/test_textpath.py,sha256=WLXvT5OzE6Tew4pr87LH-cgioCzM7srgMNRemiMEC5o,271 +matplotlib/tests/test_ticker.py,sha256=Bj3hiEgXMBXzDi0Lmg8k-Qg4ChPZrQVqGU0Qfh7Sv84,74759 +matplotlib/tests/test_tightlayout.py,sha256=KU0m9BnpbKV-jBdaI4EbDLE5Cf_BLtF0Y9hcFfQ4zlQ,13978 +matplotlib/tests/test_transforms.py,sha256=NfWkPWOKAmY1sq_R1T85ct4CYdFmYUjmjtg2uwOYIIY,48671 +matplotlib/tests/test_triangulation.py,sha256=hstVYhH09BoVl8L0n6vHXBabniZLUu2KYl6wqZn2Xac,55289 +matplotlib/tests/test_type1font.py,sha256=gZZDFi0buFus_aAgOrmtwKqbC36-88UkvM8Afbcngs4,6369 +matplotlib/tests/test_units.py,sha256=0v7JNqjkA8MVnvlOeFoYqQ3sW1KjRY_ijp8BGM8ZW_U,11675 +matplotlib/tests/test_usetex.py,sha256=a-Y6NuyROPHDGP2ELsOZNSVwBUoGtAv1xQZfisl9lSE,6405 +matplotlib/tests/test_widgets.py,sha256=U2FMCCJyKD-pcLbzGzEBAL91-3Fx80wsYdinF7eCOrg,66435 +matplotlib/texmanager.py,sha256=vP9rzv_jYdsexQoAAdMt9rLd6-MwWFX6JGFX9pG4nfg,15033 +matplotlib/texmanager.pyi,sha256=di3gbC9muXKTo5VCrW5ye-e19A1mrOl8e8lvI3b904A,1116 +matplotlib/text.py,sha256=xYSkLZiOpew4G1jkLA5pUjtF7qUhOs3yp8IEFMqE6Gg,70856 +matplotlib/text.pyi,sha256=EL_-kiqz88kuPs0hss6IucRrOCsLJ4zaKOYRFV5Wbk8,7019 +matplotlib/textpath.py,sha256=QmrZQ2Mtfyao6qWg-wmEGAbTwkuJ54AnAzFf9NzZHg4,13254 +matplotlib/textpath.pyi,sha256=rqOeTAeQYgm2b2NpetrEX0gMF8PzzW43xS5mNfUA98M,2529 +matplotlib/ticker.py,sha256=YoIJyWVlYk3DIZbBl-w6fzkuVES3j1dN4nyf-SHKVDg,107381 +matplotlib/ticker.pyi,sha256=YvnQDHxJT9H-iomDZpvBCoYkfRRYSot2MR0H0pjZM78,10604 +matplotlib/transforms.py,sha256=2v-golPy7KIqabRM1cQolEF6UcbyXwKrhwcpssGe9Pk,99707 +matplotlib/transforms.pyi,sha256=0I70FR4ZWmRoo6w5KjbXRrH0dVDmQyPFbGt_sNTYq10,12102 +matplotlib/tri/__init__.py,sha256=asnfefKRpJv7sGbfddCMybnJInVDPwgph7g0mpoh2u4,820 +matplotlib/tri/_triangulation.py,sha256=Ur2lKMOx4NrZxwyi0hBeBnVzicuKaCke0NkrZneSklM,9784 +matplotlib/tri/_triangulation.pyi,sha256=pVw1rvpIcl00p7V7E9GcvJSqQWyoxlZXX_p0_VSxTiY,1017 +matplotlib/tri/_tricontour.py,sha256=yxeH8QgBub1nTq9qbO5Iiiz81bcLXDeMcJHUS2Ahmns,10220 +matplotlib/tri/_tricontour.pyi,sha256=jnsAmVRX0-FOUw9ptUgci9J4T4JQRloKeH8fh8aAi-o,1155 +matplotlib/tri/_trifinder.py,sha256=3gUzJZDIwfdsSJUE8hIKso9e1-UGvynUN9HxaqC1EEc,3522 +matplotlib/tri/_trifinder.pyi,sha256=dXcZucacAS3Ch6nrDBPh2e3LYZLfZ7VwqpBUBb-vMPo,405 +matplotlib/tri/_triinterpolate.py,sha256=4FtyJSoJpHcFxkSkZHZ1aNengVNWqVKF4l78PgCH8O0,62445 +matplotlib/tri/_triinterpolate.pyi,sha256=nR0o0Jm0uPH-6l0ft1WRHqYlq-6o84X6M0aX1UJ9IvE,1044 +matplotlib/tri/_tripcolor.py,sha256=Z6urFv-naDM3_rhv_pkgIZI53h88aOo9qWc3MH7GmBA,6705 +matplotlib/tri/_tripcolor.pyi,sha256=QsA-A2ohj3r_tAElt2-9pzi47JiU01tNlRPDIptqnh4,1781 +matplotlib/tri/_triplot.py,sha256=jlHSz36Z5S18zBKc639PlSqdhfl7jHol8ExlddJuDI4,3102 +matplotlib/tri/_triplot.pyi,sha256=9USU-BfitrcdQE8yWOUlBX59QBNoHCWivDon9JbDQ0k,446 +matplotlib/tri/_trirefine.py,sha256=NG8bsDhZ5EOxMT-MsEWzJm11ZR3_8CAYHlG53IGi0ps,13178 +matplotlib/tri/_trirefine.pyi,sha256=J_PmjbeI6UbLaeecgj1OCvGe_sr9UUsNK9NGBSlQ320,1056 +matplotlib/tri/_tritools.py,sha256=wC9KVE6UqkWVHpyW9FU4hQdqRVRVmJlhaBF1EXsaD8U,10575 +matplotlib/tri/_tritools.pyi,sha256=XWwwvH2nIAmH8k59aRjnLBVQbTwKvd_FzdsRNASCJMw,402 +matplotlib/typing.py,sha256=ZxpQ5jAqiprq3UkqWParWJITgIuOi4ZJ6WEM_q3QOis,2439 +matplotlib/units.py,sha256=7O-llc8k3GpdotUs2tWcEGgoUHHX-Y7o0R7f-1Jve3k,6429 
+matplotlib/widgets.py,sha256=IbxivJqDYlQ9_utWiJpwGLxeYqPeIYLNcy2oLRjuigM,152101 +matplotlib/widgets.pyi,sha256=y4Mp69j-s0zJSVxYdmjoxR4F0bWAYApYdmW6cTW3GBc,15370 +mpl_toolkits/axes_grid1/__init__.py,sha256=wiuUCQo1g20SW5T3mFOmI9dGCJY6aDmglpQw5DfszEU,371 +mpl_toolkits/axes_grid1/anchored_artists.py,sha256=ZO5bIF_29sWrk-lWocKJ_45OQdluf_gu7nLrCtfKtdY,17161 +mpl_toolkits/axes_grid1/axes_divider.py,sha256=JD3jGBAqRHRYb1jablrpq_RFQvYtYfwtIIPJgQpbKPc,21892 +mpl_toolkits/axes_grid1/axes_grid.py,sha256=sjEbq6hu7YFimoOYVIcaccEGXsSljNPaTLfJjFHWVtM,22344 +mpl_toolkits/axes_grid1/axes_rgb.py,sha256=pabgaWJuLTCPw2FlT6Zfy5d0_95CEvaLeosWRTElR98,5227 +mpl_toolkits/axes_grid1/axes_size.py,sha256=UAEQ-t0qeJlQcwbbrY4tK2NuSWheFyUXIkk4BUCauHc,7713 +mpl_toolkits/axes_grid1/inset_locator.py,sha256=_2U8ZAj_x7gjKhPk40VSbzE7L6wO2VY-OoOMfd6xOJs,19649 +mpl_toolkits/axes_grid1/mpl_axes.py,sha256=vFCttnj9JIgY3Mt2eOi-O_FVvdZ6SW_sBtIBFib6bz4,4251 +mpl_toolkits/axes_grid1/parasite_axes.py,sha256=809Uy3bLgXIDIGztefcvS9EoZcu3KfMV1QwLzX8qVT4,9404 +mpl_toolkits/axes_grid1/tests/__init__.py,sha256=sKLxL9jEJBX7eh5OumtXSOnTriPrJUkujTHFtnJVFrM,365 +mpl_toolkits/axes_grid1/tests/conftest.py,sha256=zB61sy90X97YJ16mIGiuaEAaBIjBEzRAK_qfSCichQM,147 +mpl_toolkits/axes_grid1/tests/test_axes_grid1.py,sha256=x-W845Cvl2O-eGGhpxKDayFg7-QQXNA94nODLCFPpJc,29093 +mpl_toolkits/axisartist/__init__.py,sha256=RPaNDl22FbmDP7ZRsku1yCqpoNqcclCk0a3rXj3G7fE,631 +mpl_toolkits/axisartist/angle_helper.py,sha256=-mjKpaR1pLMJuoc0sx0_V3bv0iRPMrpS7r_WI0UYrCc,12952 +mpl_toolkits/axisartist/axes_divider.py,sha256=65xSCQ9cHSC3KE7J7HxS4bfsDTAbPmwyz1jJ43BqnBs,122 +mpl_toolkits/axisartist/axis_artist.py,sha256=9FY9yXl8eF5QuBvF8-Vipv_rmdpcOlA3Q48a5v7FoeA,38328 +mpl_toolkits/axisartist/axisline_style.py,sha256=9jbDkXEzMQiDHR-lDYKZEvTADtJwt2qlN1cErVUUdx0,6723 +mpl_toolkits/axisartist/axislines.py,sha256=QxKvChTaRPj0ovvxdrfr3pOzExEIf5svDwxRE7enEXg,16556 +mpl_toolkits/axisartist/floating_axes.py,sha256=kfhWKkmiy8tkXoVHvooTidXIlicU861VncElsUcBhLo,10337 +mpl_toolkits/axisartist/grid_finder.py,sha256=Hi2zwnQilavgrqWKScuSRba_WBPqqrbmErGU0XyAsOo,12265 +mpl_toolkits/axisartist/grid_helper_curvelinear.py,sha256=ofN7pPqEMh3r6bSf9eOHkAZEOrmhiNZV-Yig3jozkC4,12349 +mpl_toolkits/axisartist/parasite_axes.py,sha256=Ydi4-0Lbczr6K7Sz1-fRwK4Tm8KlHrOIumx67Xbo_9c,244 +mpl_toolkits/axisartist/tests/__init__.py,sha256=sKLxL9jEJBX7eh5OumtXSOnTriPrJUkujTHFtnJVFrM,365 +mpl_toolkits/axisartist/tests/conftest.py,sha256=zB61sy90X97YJ16mIGiuaEAaBIjBEzRAK_qfSCichQM,147 +mpl_toolkits/axisartist/tests/test_angle_helper.py,sha256=PwhJwBm2kk4uMyhdO5arQs8IlqSX2vN0hvUzI7YHqrw,5670 +mpl_toolkits/axisartist/tests/test_axis_artist.py,sha256=wt3bicVgUPnBX48-dH0Z6hboHgutIgwVpaGkcUZDeVU,2980 +mpl_toolkits/axisartist/tests/test_axislines.py,sha256=NXegrvEzVWovshta-qjbUKA2tpQcAbjYbfwkf6tKT6Y,4353 +mpl_toolkits/axisartist/tests/test_floating_axes.py,sha256=l24VB1SLrsJZOMMH2jmBny9ETha4AqAM5KokdGOa5Wk,4083 +mpl_toolkits/axisartist/tests/test_grid_finder.py,sha256=cwQLDOdcJbAY2E7dr8595yzuNh1_Yh80r_O8WGT2hMY,1156 +mpl_toolkits/axisartist/tests/test_grid_helper_curvelinear.py,sha256=OhHej0vCfCjJJknT7yIt4OxZd6OMJCXnFoT3pzqUtTo,7216 +mpl_toolkits/mplot3d/__init__.py,sha256=fH9HdMfFMvjbIWqy2gjQnm2m3ae1CvLiuH6LwKHo0kI,49 +mpl_toolkits/mplot3d/art3d.py,sha256=ovygkCV4P7dQ9xIicczbJyyAbaEy2C1HYnzjuV3COus,50548 +mpl_toolkits/mplot3d/axes3d.py,sha256=hCBda2QB6_OYyKEJOG0paPEgDPmLIcrEMkZuKqOUFho,157921 +mpl_toolkits/mplot3d/axis3d.py,sha256=eQPWo2TKbRsPY8JUt5drjBbyRdVKBgeT2EF4HGg51oU,29327 
+mpl_toolkits/mplot3d/proj3d.py,sha256=6Hm6WPzeu_wjfeR8afrQ1nCfjS0p3wjvoSIxJWVlS0s,6349
+mpl_toolkits/mplot3d/tests/__init__.py,sha256=sKLxL9jEJBX7eh5OumtXSOnTriPrJUkujTHFtnJVFrM,365
+mpl_toolkits/mplot3d/tests/conftest.py,sha256=zB61sy90X97YJ16mIGiuaEAaBIjBEzRAK_qfSCichQM,147
+mpl_toolkits/mplot3d/tests/test_art3d.py,sha256=AdFcJf0vz1_b-PIn2cVonPHF1vZw2EOHvmjdgBRa5Yo,3317
+mpl_toolkits/mplot3d/tests/test_axes3d.py,sha256=Xf5M1DipyVfsBBF8dA3AXg5U0Hts194EfAh57VSupE4,92348
+mpl_toolkits/mplot3d/tests/test_legend3d.py,sha256=QGPaoaucJP9KIC68g8zmk4divB_w5PtQc4DMIHMpcA8,4343
+pylab.py,sha256=zUXU0l7e7C5jmDSJbM0GLQxBun3xzuXNf1tuoZYA6Xk,110
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/REQUESTED b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/WHEEL b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..4e4c38ae320920b8f083b87f408214cdecd350d2
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib-3.10.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: meson
+Root-Is-Purelib: false
+Tag: cp310-cp310-manylinux_2_17_x86_64
+Tag: cp310-cp310-manylinux2014_x86_64
+
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/__init__.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fc92742ec984e31b4f988477b92cbe974fbc6e94
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/__init__.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/_docstring.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/_docstring.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..786ecf22c7bf51321414d08860f024f4ba8e0913
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/_docstring.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/_version.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/_version.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4269448a509129a5833d7e25ea95aaef26fd0c76
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/_version.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/bezier.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/bezier.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d645fd8cb3b5ec2c71afb6a74d4e0c2c758bb846
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/bezier.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/cbook.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/cbook.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..028d16019194b24d3459c858a9a9c7b43e8e6621
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/cbook.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/path.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/path.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..64c970c1ddc93ff00d3b22f6bca4495aaf6d4041
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/path.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/rcsetup.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/rcsetup.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a426532cd3f32873ec664ff3fa9adc8304183f99
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/rcsetup.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/ticker.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/ticker.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bae9be6b0f6240f8cfad683c8bfe367cd92e48ca
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/ticker.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/transforms.cpython-310.pyc b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/transforms.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8a110aab6f8dca6f4bdafc504e81e686136c058e
Binary files /dev/null and b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/__pycache__/transforms.cpython-310.pyc differ
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/__init__.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8cce4fe4558d30d6c71ec7a63e347a0fb9bf7ada
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/__init__.py
@@ -0,0 +1,10 @@
+from pathlib import Path
+
+
+# Check that the test directories exist.
+if not (Path(__file__).parent / 'baseline_images').exists():
+    raise OSError(
+        'The baseline image directory does not exist. '
+        'This is most likely because the test data is not installed. '
+        'You may need to install matplotlib from source to get the '
+        'test data.')
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/conftest.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..6afd566750e9de31bd05c37c6e60dde3d7172db7
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/conftest.py
@@ -0,0 +1,2 @@
+from matplotlib.testing.conftest import (  # noqa
+    mpl_test_settings, pytest_configure, pytest_unconfigure, pd, text_placeholders, xr)
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_afm.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_afm.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5c6a83937cd68e3ae20a14d8babb9a99bd5a4f5
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_afm.py
@@ -0,0 +1,137 @@
+from io import BytesIO
+import pytest
+import logging
+
+from matplotlib import _afm
+from matplotlib import font_manager as fm
+
+
+# See note in afm.py re: use of comma as decimal separator in the
+# UnderlineThickness field and re: use of non-ASCII characters in the Notice
+# field.
+AFM_TEST_DATA = b"""StartFontMetrics 2.0
+Comment Comments are ignored.
+Comment Creation Date:Mon Nov 13 12:34:11 GMT 2017
+FontName MyFont-Bold
+EncodingScheme FontSpecific
+FullName My Font Bold
+FamilyName Test Fonts
+Weight Bold
+ItalicAngle 0.0
+IsFixedPitch false
+UnderlinePosition -100
+UnderlineThickness 56,789
+Version 001.000
+Notice Copyright \xa9 2017 No one.
+FontBBox 0 -321 1234 369
+StartCharMetrics 3
+C 0 ; WX 250 ; N space ; B 0 0 0 0 ;
+C 42 ; WX 1141 ; N foo ; B 40 60 800 360 ;
+C 99 ; WX 583 ; N bar ; B 40 -10 543 210 ;
+EndCharMetrics
+EndFontMetrics
+"""
+
+
+def test_nonascii_str():
+    # This tests that we also decode bytes as utf-8 properly.
+    # Else, font files with non ascii characters fail to load.
+    inp_str = "привет"
+    byte_str = inp_str.encode("utf8")
+
+    ret = _afm._to_str(byte_str)
+    assert ret == inp_str
+
+
+def test_parse_header():
+    fh = BytesIO(AFM_TEST_DATA)
+    header = _afm._parse_header(fh)
+    assert header == {
+        b'StartFontMetrics': 2.0,
+        b'FontName': 'MyFont-Bold',
+        b'EncodingScheme': 'FontSpecific',
+        b'FullName': 'My Font Bold',
+        b'FamilyName': 'Test Fonts',
+        b'Weight': 'Bold',
+        b'ItalicAngle': 0.0,
+        b'IsFixedPitch': False,
+        b'UnderlinePosition': -100,
+        b'UnderlineThickness': 56.789,
+        b'Version': '001.000',
+        b'Notice': b'Copyright \xa9 2017 No one.',
+        b'FontBBox': [0, -321, 1234, 369],
+        b'StartCharMetrics': 3,
+    }
+
+
+def test_parse_char_metrics():
+    fh = BytesIO(AFM_TEST_DATA)
+    _afm._parse_header(fh)  # position
+    metrics = _afm._parse_char_metrics(fh)
+    assert metrics == (
+        {0: (250.0, 'space', [0, 0, 0, 0]),
+         42: (1141.0, 'foo', [40, 60, 800, 360]),
+         99: (583.0, 'bar', [40, -10, 543, 210]),
+         },
+        {'space': (250.0, 'space', [0, 0, 0, 0]),
+         'foo': (1141.0, 'foo', [40, 60, 800, 360]),
+         'bar': (583.0, 'bar', [40, -10, 543, 210]),
+         })
+
+
+def test_get_familyname_guessed():
+    fh = BytesIO(AFM_TEST_DATA)
+    font = _afm.AFM(fh)
+    del font._header[b'FamilyName']  # remove FamilyName, so we have to guess
+    assert font.get_familyname() == 'My Font'
+
+
+def test_font_manager_weight_normalization():
+    font = _afm.AFM(BytesIO(
+        AFM_TEST_DATA.replace(b"Weight Bold\n", b"Weight Custom\n")))
+    assert fm.afmFontProperty("", font).weight == "normal"
+
+
+@pytest.mark.parametrize(
+    "afm_data",
+    [
+        b"""nope
+really nope""",
+        b"""StartFontMetrics 2.0
+Comment Comments are ignored.
+Comment Creation Date:Mon Nov 13 12:34:11 GMT 2017
+FontName MyFont-Bold
+EncodingScheme FontSpecific""",
+    ],
+)
+def test_bad_afm(afm_data):
+    fh = BytesIO(afm_data)
+    with pytest.raises(RuntimeError):
+        _afm._parse_header(fh)
+
+
+@pytest.mark.parametrize(
+    "afm_data",
+    [
+        b"""StartFontMetrics 2.0
+Comment Comments are ignored.
+Comment Creation Date:Mon Nov 13 12:34:11 GMT 2017
+Aardvark bob
+FontName MyFont-Bold
+EncodingScheme FontSpecific
+StartCharMetrics 3""",
+        b"""StartFontMetrics 2.0
+Comment Comments are ignored.
+Comment Creation Date:Mon Nov 13 12:34:11 GMT 2017
+ItalicAngle zero degrees
+FontName MyFont-Bold
+EncodingScheme FontSpecific
+StartCharMetrics 3""",
+    ],
+)
+def test_malformed_header(afm_data, caplog):
+    fh = BytesIO(afm_data)
+    with caplog.at_level(logging.ERROR):
+        _afm._parse_header(fh)
+
+    assert len(caplog.records) == 1
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_agg.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_agg.py
new file mode 100644
index 0000000000000000000000000000000000000000..59387793605a0fe0f0b609773cf4f0641a3f6191
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_agg.py
@@ -0,0 +1,340 @@
+import io
+
+import numpy as np
+from numpy.testing import assert_array_almost_equal
+from PIL import features, Image, TiffTags
+import pytest
+
+
+from matplotlib import (
+    collections, patheffects, pyplot as plt, transforms as mtransforms,
+    rcParams, rc_context)
+from matplotlib.backends.backend_agg import RendererAgg
+from matplotlib.figure import Figure
+from matplotlib.image import imread
+from matplotlib.path import Path
+from matplotlib.testing.decorators import image_comparison
+from matplotlib.transforms import IdentityTransform
+
+
+def test_repeated_save_with_alpha():
+    # We want an image which has a background color of bluish green, with an
+    # alpha of 0.25.
+
+    fig = Figure([1, 0.4])
+    fig.set_facecolor((0, 1, 0.4))
+    fig.patch.set_alpha(0.25)
+
+    # The target color is fig.patch.get_facecolor()
+
+    buf = io.BytesIO()
+
+    fig.savefig(buf,
+                facecolor=fig.get_facecolor(),
+                edgecolor='none')
+
+    # Save the figure again to check that the
+    # colors don't bleed from the previous renderer.
+    buf.seek(0)
+    fig.savefig(buf,
+                facecolor=fig.get_facecolor(),
+                edgecolor='none')
+
+    # Check the first pixel has the desired color & alpha
+    # (approx: 0, 1.0, 0.4, 0.25)
+    buf.seek(0)
+    assert_array_almost_equal(tuple(imread(buf)[0, 0]),
+                              (0.0, 1.0, 0.4, 0.250),
+                              decimal=3)
+
+
+def test_large_single_path_collection():
+    buff = io.BytesIO()
+
+    # Generates a too-large single path in a path collection that
+    # would cause a segfault if the draw_markers optimization is
+    # applied.
+    f, ax = plt.subplots()
+    collection = collections.PathCollection(
+        [Path([[-10, 5], [10, 5], [10, -5], [-10, -5], [-10, 5]])])
+    ax.add_artist(collection)
+    ax.set_xlim(10**-3, 1)
+    plt.savefig(buff)
+
+
+def test_marker_with_nan():
+    # This creates a marker with nans in it, which was segfaulting the
+    # Agg backend (see #3722)
+    fig, ax = plt.subplots(1)
+    steps = 1000
+    data = np.arange(steps)
+    ax.semilogx(data)
+    ax.fill_between(data, data*0.8, data*1.2)
+    buf = io.BytesIO()
+    fig.savefig(buf, format='png')
+
+
+def test_long_path():
+    buff = io.BytesIO()
+    fig = Figure()
+    ax = fig.subplots()
+    points = np.ones(100_000)
+    points[::2] *= -1
+    ax.plot(points)
+    fig.savefig(buff, format='png')
+
+
+@image_comparison(['agg_filter.png'], remove_text=True)
+def test_agg_filter():
+    def smooth1d(x, window_len):
+        # copied from https://scipy-cookbook.readthedocs.io/
+        s = np.r_[
+            2*x[0] - x[window_len:1:-1], x, 2*x[-1] - x[-1:-window_len:-1]]
+        w = np.hanning(window_len)
+        y = np.convolve(w/w.sum(), s, mode='same')
+        return y[window_len-1:-window_len+1]
+
+    def smooth2d(A, sigma=3):
+        window_len = max(int(sigma), 3) * 2 + 1
+        A = np.apply_along_axis(smooth1d, 0, A, window_len)
+        A = np.apply_along_axis(smooth1d, 1, A, window_len)
+        return A
+
+    class BaseFilter:
+
+        def get_pad(self, dpi):
+            return 0
+
+        def process_image(self, padded_src, dpi):
+            raise NotImplementedError("Should be overridden by subclasses")
+
+        def __call__(self, im, dpi):
+            pad = self.get_pad(dpi)
+            padded_src = np.pad(im, [(pad, pad), (pad, pad), (0, 0)],
+                                "constant")
+            tgt_image = self.process_image(padded_src, dpi)
+            return tgt_image, -pad, -pad
+
+    class OffsetFilter(BaseFilter):
+
+        def __init__(self, offsets=(0, 0)):
+            self.offsets = offsets
+
+        def get_pad(self, dpi):
+            return int(max(self.offsets) / 72 * dpi)
+
+        def process_image(self, padded_src, dpi):
+            ox, oy = self.offsets
+            a1 = np.roll(padded_src, int(ox / 72 * dpi), axis=1)
+            a2 = np.roll(a1, -int(oy / 72 * dpi), axis=0)
+            return a2
+
+    class GaussianFilter(BaseFilter):
+        """Simple Gaussian filter."""
+
+        def __init__(self, sigma, alpha=0.5, color=(0, 0, 0)):
+            self.sigma = sigma
+            self.alpha = alpha
+            self.color = color
+
+        def get_pad(self, dpi):
+            return int(self.sigma*3 / 72 * dpi)
+
+        def process_image(self, padded_src, dpi):
+            tgt_image = np.empty_like(padded_src)
+            tgt_image[:, :, :3] = self.color
+            tgt_image[:, :, 3] = smooth2d(padded_src[:, :, 3] * self.alpha,
+                                          self.sigma / 72 * dpi)
+            return tgt_image
+
+    class DropShadowFilter(BaseFilter):
+
+        def __init__(self, sigma, alpha=0.3, color=(0, 0, 0), offsets=(0, 0)):
+            self.gauss_filter = GaussianFilter(sigma, alpha, color)
+            self.offset_filter = OffsetFilter(offsets)
+
+        def get_pad(self, dpi):
+            return max(self.gauss_filter.get_pad(dpi),
+                       self.offset_filter.get_pad(dpi))
+
+        def process_image(self, padded_src, dpi):
+            t1 = self.gauss_filter.process_image(padded_src, dpi)
+            t2 = self.offset_filter.process_image(t1, dpi)
+            return t2
+
+    fig, ax = plt.subplots()
+
+    # draw lines
+    line1, = ax.plot([0.1, 0.5, 0.9], [0.1, 0.9, 0.5], "bo-",
+                     mec="b", mfc="w", lw=5, mew=3, ms=10, label="Line 1")
+    line2, = ax.plot([0.1, 0.5, 0.9], [0.5, 0.2, 0.7], "ro-",
+                     mec="r", mfc="w", lw=5, mew=3, ms=10, label="Line 1")
+
+    gauss = DropShadowFilter(4)
+
+    for line in [line1, line2]:
+
+        # draw shadows with same lines with slight offset.
+        xx = line.get_xdata()
+        yy = line.get_ydata()
+        shadow, = ax.plot(xx, yy)
+        shadow.update_from(line)
+
+        # offset transform
+        transform = mtransforms.offset_copy(
+            line.get_transform(), fig, x=4.0, y=-6.0, units='points')
+        shadow.set_transform(transform)
+
+        # adjust zorder of the shadow lines so that it is drawn below the
+        # original lines
+        shadow.set_zorder(line.get_zorder() - 0.5)
+        shadow.set_agg_filter(gauss)
+        shadow.set_rasterized(True)  # to support mixed-mode renderers
+
+    ax.set_xlim(0., 1.)
+    ax.set_ylim(0., 1.)
+
+    ax.xaxis.set_visible(False)
+    ax.yaxis.set_visible(False)
+
+
+def test_too_large_image():
+    fig = plt.figure(figsize=(300, 2**25))
+    buff = io.BytesIO()
+    with pytest.raises(ValueError):
+        fig.savefig(buff)
+
+
+def test_chunksize():
+    x = range(200)
+
+    # Test without chunksize
+    fig, ax = plt.subplots()
+    ax.plot(x, np.sin(x))
+    fig.canvas.draw()
+
+    # Test with chunksize
+    fig, ax = plt.subplots()
+    rcParams['agg.path.chunksize'] = 105
+    ax.plot(x, np.sin(x))
+    fig.canvas.draw()
+
+
+@pytest.mark.backend('Agg')
+def test_jpeg_dpi():
+    # Check that dpi is set correctly in jpg files.
+    plt.plot([0, 1, 2], [0, 1, 0])
+    buf = io.BytesIO()
+    plt.savefig(buf, format="jpg", dpi=200)
+    im = Image.open(buf)
+    assert im.info['dpi'] == (200, 200)
+
+
+def test_pil_kwargs_png():
+    from PIL.PngImagePlugin import PngInfo
+    buf = io.BytesIO()
+    pnginfo = PngInfo()
+    pnginfo.add_text("Software", "test")
+    plt.figure().savefig(buf, format="png", pil_kwargs={"pnginfo": pnginfo})
+    im = Image.open(buf)
+    assert im.info["Software"] == "test"
+
+
+def test_pil_kwargs_tiff():
+    buf = io.BytesIO()
+    pil_kwargs = {"description": "test image"}
+    plt.figure().savefig(buf, format="tiff", pil_kwargs=pil_kwargs)
+    im = Image.open(buf)
+    tags = {TiffTags.TAGS_V2[k].name: v for k, v in im.tag_v2.items()}
+    assert tags["ImageDescription"] == "test image"
+
+
+@pytest.mark.skipif(not features.check("webp"), reason="WebP support not available")
+def test_pil_kwargs_webp():
+    plt.plot([0, 1, 2], [0, 1, 0])
+    buf_small = io.BytesIO()
+    pil_kwargs_low = {"quality": 1}
+    plt.savefig(buf_small, format="webp", pil_kwargs=pil_kwargs_low)
+    assert len(pil_kwargs_low) == 1
+    buf_large = io.BytesIO()
+    pil_kwargs_high = {"quality": 100}
+    plt.savefig(buf_large, format="webp", pil_kwargs=pil_kwargs_high)
+    assert len(pil_kwargs_high) == 1
+    assert buf_large.getbuffer().nbytes > buf_small.getbuffer().nbytes
+
+
+@pytest.mark.skipif(not features.check("webp"), reason="WebP support not available")
+def test_webp_alpha():
+    plt.plot([0, 1, 2], [0, 1, 0])
+    buf = io.BytesIO()
+    plt.savefig(buf, format="webp", transparent=True)
+    im = Image.open(buf)
+    assert im.mode == "RGBA"
+
+
+def test_draw_path_collection_error_handling():
+    fig, ax = plt.subplots()
+    ax.scatter([1], [1]).set_paths(Path([(0, 1), (2, 3)]))
+    with pytest.raises(TypeError):
+        fig.canvas.draw()
+
+
+def test_chunksize_fails():
+    # NOTE: This test covers multiple independent test scenarios in a single
+    # function, because each scenario uses ~2GB of memory and we don't
+    # want parallel test executors to accidentally run multiple of these
+    # at the same time.
+
+    N = 100_000
+    dpi = 500
+    w = 5*dpi
+    h = 6*dpi
+
+    # make a Path that spans the whole w-h rectangle
+    x = np.linspace(0, w, N)
+    y = np.ones(N) * h
+    y[::2] = 0
+    path = Path(np.vstack((x, y)).T)
+    # effectively disable path simplification (but leaving it "on")
+    path.simplify_threshold = 0
+
+    # setup the minimal GraphicsContext to draw a Path
+    ra = RendererAgg(w, h, dpi)
+    gc = ra.new_gc()
+    gc.set_linewidth(1)
+    gc.set_foreground('r')
+
+    gc.set_hatch('/')
+    with pytest.raises(OverflowError, match='cannot split hatched path'):
+        ra.draw_path(gc, path, IdentityTransform())
+    gc.set_hatch(None)
+
+    with pytest.raises(OverflowError, match='cannot split filled path'):
+        ra.draw_path(gc, path, IdentityTransform(), (1, 0, 0))
+
+    # Set to zero to disable, currently defaults to 0, but let's be sure.
+    with rc_context({'agg.path.chunksize': 0}):
+        with pytest.raises(OverflowError, match='Please set'):
+            ra.draw_path(gc, path, IdentityTransform())
+
+    # Set big enough that we do not try to chunk.
+    with rc_context({'agg.path.chunksize': 1_000_000}):
+        with pytest.raises(OverflowError, match='Please reduce'):
+            ra.draw_path(gc, path, IdentityTransform())
+
+    # Small enough we will try to chunk, but big enough we will fail to render.
+    with rc_context({'agg.path.chunksize': 90_000}):
+        with pytest.raises(OverflowError, match='Please reduce'):
+            ra.draw_path(gc, path, IdentityTransform())
+
+    path.should_simplify = False
+    with pytest.raises(OverflowError, match="should_simplify is False"):
+        ra.draw_path(gc, path, IdentityTransform())
+
+
+def test_non_tuple_rgbaface():
+    # This passes rgbaFace as a ndarray to draw_path.
+    fig = plt.figure()
+    fig.add_subplot(projection="3d").scatter(
+        [0, 1, 2], [0, 1, 2], path_effects=[patheffects.Stroke(linewidth=4)])
+    fig.canvas.draw()
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_agg_filter.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_agg_filter.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc8cff6858ae52a85028b4f8b3266b4d1880f687
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_agg_filter.py
@@ -0,0 +1,33 @@
+import numpy as np
+
+import matplotlib.pyplot as plt
+from matplotlib.testing.decorators import image_comparison
+
+
+@image_comparison(baseline_images=['agg_filter_alpha'],
+                  extensions=['png', 'pdf'])
+def test_agg_filter_alpha():
+    # Remove this line when this test image is regenerated.
+    plt.rcParams['pcolormesh.snap'] = False
+
+    ax = plt.axes()
+    x, y = np.mgrid[0:7, 0:8]
+    data = x**2 - y**2
+    mesh = ax.pcolormesh(data, cmap='Reds', zorder=5)
+
+    def manual_alpha(im, dpi):
+        im[:, :, 3] *= 0.6
+        print('CALLED')
+        return im, 0, 0
+
+    # Note: Doing alpha like this is not the same as setting alpha on
+    # the mesh itself. Currently meshes are drawn as independent patches,
+    # and we see fine borders around the blocks of color. See the SO
+    # question for an example: https://stackoverflow.com/q/20678817/
+    mesh.set_agg_filter(manual_alpha)
+
+    # Currently we must enable rasterization for this to have an effect in
+    # the PDF backend.
+    mesh.set_rasterized(True)
+
+    ax.plot([0, 4, 7], [1, 3, 8])
diff --git a/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_animation.py b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_animation.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3382f662c73cb80cf93f949df533b5812bb3981
--- /dev/null
+++ b/Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/matplotlib/tests/test_animation.py
@@ -0,0 +1,571 @@
+import os
+from pathlib import Path
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import weakref
+
+import numpy as np
+import pytest
+
+import matplotlib as mpl
+from matplotlib import pyplot as plt
+from matplotlib import animation
+from matplotlib.animation import PillowWriter
+from matplotlib.testing.decorators import check_figures_equal
+
+
+@pytest.fixture()
+def anim(request):
+    """Create a simple animation (with options)."""
+    fig, ax = plt.subplots()
+    line, = ax.plot([], [])
+
+    ax.set_xlim(0, 10)
+    ax.set_ylim(-1, 1)
+
+    def init():
+        line.set_data([], [])
+        return line,
+
+    def animate(i):
+        x = np.linspace(0, 10, 100)
+        y = np.sin(x + i)
+        line.set_data(x, y)
+        return line,
+
+    # "klass" can be passed to determine the class returned by the fixture
+    kwargs = dict(getattr(request, 'param', {}))  # make a copy
+    klass = kwargs.pop('klass', animation.FuncAnimation)
+    if 'frames' not in kwargs:
+        kwargs['frames'] = 5
+    return klass(fig=fig, func=animate, init_func=init, **kwargs)
+
+
+class NullMovieWriter(animation.AbstractMovieWriter):
+    """
+    A minimal MovieWriter. It doesn't actually write anything.
+    It just saves the arguments that were given to the setup() and
+    grab_frame() methods as attributes, and counts how many times
+    grab_frame() is called.
+
+    This class doesn't have an __init__ method with the appropriate
+    signature, and it doesn't define an isAvailable() method, so
+    it cannot be added to the 'writers' registry.
+    """
+
+    def setup(self, fig, outfile, dpi, *args):
+        self.fig = fig
+        self.outfile = outfile
+        self.dpi = dpi
+        self.args = args
+        self._count = 0
+
+    def grab_frame(self, **savefig_kwargs):
+        from matplotlib.animation import _validate_grabframe_kwargs
+        _validate_grabframe_kwargs(savefig_kwargs)
+        self.savefig_kwargs = savefig_kwargs
+        self._count += 1
+
+    def finish(self):
+        pass
+
+
+def test_null_movie_writer(anim):
+    # Test running an animation with NullMovieWriter.
+    plt.rcParams["savefig.facecolor"] = "auto"
+    filename = "unused.null"
+    dpi = 50
+    savefig_kwargs = dict(foo=0)
+    writer = NullMovieWriter()
+
+    anim.save(filename, dpi=dpi, writer=writer,
+              savefig_kwargs=savefig_kwargs)
+
+    assert writer.fig == plt.figure(1)  # The figure used by anim fixture
+    assert writer.outfile == filename
+    assert writer.dpi == dpi
+    assert writer.args == ()
+    # we enrich the savefig kwargs to ensure we composite transparent
+    # output to an opaque background
+    for k, v in savefig_kwargs.items():
+        assert writer.savefig_kwargs[k] == v
+    assert writer._count == anim._save_count
+
+
+@pytest.mark.parametrize('anim', [dict(klass=dict)], indirect=['anim'])
+def test_animation_delete(anim):
+    if platform.python_implementation() == 'PyPy':
+        # Something in the test setup fixture lingers around into the test and
+        # breaks pytest.warns on PyPy. This garbage collection fixes it.
+@pytest.mark.parametrize('anim', [dict(klass=dict)], indirect=['anim'])
+def test_animation_delete(anim):
+    if platform.python_implementation() == 'PyPy':
+        # Something in the test setup fixture lingers around into the test
+        # and breaks pytest.warns on PyPy.  This garbage collection fixes it.
+        # https://foss.heptapod.net/pypy/pypy/-/issues/3536
+        np.testing.break_cycles()
+    anim = animation.FuncAnimation(**anim)
+    with pytest.warns(Warning, match='Animation was deleted'):
+        del anim
+        np.testing.break_cycles()
+
+
+def test_movie_writer_dpi_default():
+    class DummyMovieWriter(animation.MovieWriter):
+        def _run(self):
+            pass
+
+    # Test setting up a movie writer with the figure.dpi default.
+    fig = plt.figure()
+
+    filename = "unused.null"
+    fps = 5
+    codec = "unused"
+    bitrate = 1
+    extra_args = ["unused"]
+
+    writer = DummyMovieWriter(fps, codec, bitrate, extra_args)
+    writer.setup(fig, filename)
+    assert writer.dpi == fig.dpi
+
+
+@animation.writers.register('null')
+class RegisteredNullMovieWriter(NullMovieWriter):
+
+    # To be able to add NullMovieWriter to the 'writers' registry,
+    # we must define an __init__ method with a specific signature,
+    # and we must define the class method isAvailable().
+    # (These methods are not actually required to use an instance
+    # of this class as the 'writer' argument of Animation.save().)
+
+    def __init__(self, fps=None, codec=None, bitrate=None,
+                 extra_args=None, metadata=None):
+        pass
+
+    @classmethod
+    def isAvailable(cls):
+        return True
+
+
+WRITER_OUTPUT = [
+    ('ffmpeg', 'movie.mp4'),
+    ('ffmpeg_file', 'movie.mp4'),
+    ('imagemagick', 'movie.gif'),
+    ('imagemagick_file', 'movie.gif'),
+    ('pillow', 'movie.gif'),
+    ('html', 'movie.html'),
+    ('null', 'movie.null')
+]
+
+
+def gen_writers():
+    for writer, output in WRITER_OUTPUT:
+        if not animation.writers.is_available(writer):
+            mark = pytest.mark.skip(
+                f"writer '{writer}' not available on this system")
+            yield pytest.param(writer, None, output, marks=[mark])
+            yield pytest.param(writer, None, Path(output), marks=[mark])
+            continue
+
+        writer_class = animation.writers[writer]
+        for frame_format in getattr(writer_class, 'supported_formats', [None]):
+            yield writer, frame_format, output
+            yield writer, frame_format, Path(output)
+
+
+# Smoke test for saving animations.  In the future, we should probably
+# design more sophisticated tests which compare resulting frames a la
+# matplotlib.testing.image_comparison.
+@pytest.mark.parametrize('writer, frame_format, output', gen_writers())
+@pytest.mark.parametrize('anim', [dict(klass=dict)], indirect=['anim'])
+def test_save_animation_smoketest(tmpdir, writer, frame_format, output, anim):
+    if frame_format is not None:
+        plt.rcParams["animation.frame_format"] = frame_format
+    anim = animation.FuncAnimation(**anim)
+    dpi = None
+    codec = None
+    if writer == 'ffmpeg':
+        # Issue #8253
+        anim._fig.set_size_inches((10.85, 9.21))
+        dpi = 100.
+        codec = 'h264'
+
+    # Use a temporary directory for the file-based writers, which produce a
+    # file per frame with known names.
+    with tmpdir.as_cwd():
+        anim.save(output, fps=30, writer=writer, bitrate=500, dpi=dpi,
+                  codec=codec)
+
+    del anim
+
+
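(The gen_writers() helper above consults matplotlib's writer registry; for reference, the same registry can be queried directly. A small sketch; the output depends on which encoders are installed locally.)

from matplotlib import animation

print(animation.writers.list())  # names of writers whose dependencies are present
if animation.writers.is_available('ffmpeg'):
    Writer = animation.writers['ffmpeg']  # class lookup, as in test_grabframe below
    print(Writer.supported_formats)       # e.g. ['rgba'] for the pipe-based writer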
+@pytest.mark.parametrize('writer, frame_format, output', gen_writers())
+def test_grabframe(tmpdir, writer, frame_format, output):
+    WriterClass = animation.writers[writer]
+
+    if frame_format is not None:
+        plt.rcParams["animation.frame_format"] = frame_format
+
+    fig, ax = plt.subplots()
+
+    dpi = None
+    codec = None
+    if writer == 'ffmpeg':
+        # Issue #8253
+        fig.set_size_inches((10.85, 9.21))
+        dpi = 100.
+        codec = 'h264'
+
+    test_writer = WriterClass()
+    # Use a temporary directory for the file-based writers, which produce a
+    # file per frame with known names.
+    with tmpdir.as_cwd():
+        with test_writer.saving(fig, output, dpi):
+            # Smoke-test that grabbing a frame works at all.
+            test_writer.grab_frame()
+            for k in {'dpi', 'bbox_inches', 'format'}:
+                with pytest.raises(
+                        TypeError,
+                        match=f"grab_frame got an unexpected keyword argument {k!r}"
+                ):
+                    test_writer.grab_frame(**{k: object()})
+
+
+@pytest.mark.parametrize('writer', [
+    pytest.param(
+        'ffmpeg', marks=pytest.mark.skipif(
+            not animation.FFMpegWriter.isAvailable(),
+            reason='Requires FFMpeg')),
+    pytest.param(
+        'imagemagick', marks=pytest.mark.skipif(
+            not animation.ImageMagickWriter.isAvailable(),
+            reason='Requires ImageMagick')),
+])
+@pytest.mark.parametrize('html, want', [
+    ('none', None),
+    ('html5', '