From 1b4ce18eca5feefc3cd8e55d8d689dba3c6180cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Clau=C3=9F?= Date: Mon, 27 Feb 2023 17:07:04 +0100 Subject: [PATCH] init package and add first subpackages --- Makefile | 2 + README.md | 1 + poetry.lock | 375 ++++++++++++++++ pyproject._toml | 33 ++ setup.cfg | 30 ++ setup.py | 6 + src/paveit/__init__.py | 4 + src/paveit/analysis/__init__.py | 9 + src/paveit/analysis/regression.py | 159 +++++++ src/paveit/helper/__init__.py | 6 + src/paveit/helper/filehasher.py | 22 + src/paveit/helper/minio.py | 40 ++ src/paveit/labtest/__init__.py | 6 + src/paveit/labtest/base.py | 90 ++++ src/paveit/labtest/citt.py | 192 +++++++++ src/paveit/labtest/sheartest.py | 683 ++++++++++++++++++++++++++++++ 16 files changed, 1658 insertions(+) create mode 100644 Makefile create mode 100644 README.md create mode 100644 poetry.lock create mode 100644 pyproject._toml create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 src/paveit/__init__.py create mode 100644 src/paveit/analysis/__init__.py create mode 100644 src/paveit/analysis/regression.py create mode 100644 src/paveit/helper/__init__.py create mode 100644 src/paveit/helper/filehasher.py create mode 100644 src/paveit/helper/minio.py create mode 100644 src/paveit/labtest/__init__.py create mode 100644 src/paveit/labtest/base.py create mode 100644 src/paveit/labtest/citt.py create mode 100644 src/paveit/labtest/sheartest.py diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..0ed9334 --- /dev/null +++ b/Makefile @@ -0,0 +1,2 @@ +link: + pip install -e ./ \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..9d3d585 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# PAVE-IT Python Package \ No newline at end of file diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..2ed6e28 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,375 @@ +# This file is automatically @generated by Poetry and should not be 
changed by hand. + +[[package]] +name = "asteval" +version = "0.9.29" +description = "Safe, minimalistic evaluator of python expression using ast module" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asteval-0.9.29-py3-none-any.whl", hash = "sha256:134e42fc4790582f2f926999e59abb444fb491046ba396836962268aad8a68a5"}, + {file = "asteval-0.9.29.tar.gz", hash = "sha256:ab98c61ba9394149c774ae7861497e9c32580301aa693ca19746997216c31fab"}, +] + +[package.extras] +all = ["Sphinx", "build", "coverage", "pytest", "pytest-cov", "twine"] +dev = ["build", "twine"] +doc = ["Sphinx"] +test = ["coverage", "pytest", "pytest-cov"] + +[[package]] +name = "dnspython" +version = "2.3.0" +description = "DNS toolkit" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dnspython-2.3.0-py3-none-any.whl", hash = "sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46"}, + {file = "dnspython-2.3.0.tar.gz", hash = "sha256:224e32b03eb46be70e12ef6d64e0be123a64e621ab4c0822ff6d450d52a540b9"}, +] + +[package.extras] +curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<40.0)"] +doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.11.0)"] +doq = ["aioquic (>=0.9.20)"] +idna = ["idna (>=2.1,<4.0)"] +trio = ["trio (>=0.14,<0.23)"] +wmi = ["wmi (>=1.5.1,<2.0.0)"] + +[[package]] +name = "future" +version = "0.18.3" +description = "Clean single-source support for Python 3 and 2" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, +] + +[[package]] +name = "lmfit" +version = "1.1.0" +description = "Least-Squares Minimization with Bounds and Constraints" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"lmfit-1.1.0-py3-none-any.whl", hash = "sha256:29f0540f94b3969a23db2b51abf309f327af8ea3667443ac4cd93d07fdfdb14f"}, + {file = "lmfit-1.1.0.tar.gz", hash = "sha256:a2755b708ad7bad010178da28f082f55cbee7a084a625b452632e2d77b5391fb"}, +] + +[package.dependencies] +asteval = ">=0.9.28" +numpy = ">=1.19" +scipy = ">=1.6" +uncertainties = ">=3.1.4" + +[package.extras] +all = ["Pillow", "Sphinx", "build", "cairosvg", "check-wheel-contents", "codecov", "corner", "coverage", "dill", "emcee (>=3.0.0)", "flaky", "jupyter-sphinx (>=0.2.4)", "matplotlib", "numdifftools", "pandas", "pre-commit", "pycairo", "pytest", "pytest-cov", "sphinx-gallery (>=0.10)", "sphinxcontrib-svg2pdfconverter", "sympy", "twine"] +dev = ["build", "check-wheel-contents", "pre-commit", "twine"] +doc = ["Pillow", "Sphinx", "cairosvg", "corner", "dill", "emcee (>=3.0.0)", "jupyter-sphinx (>=0.2.4)", "matplotlib", "numdifftools", "pandas", "pycairo", "sphinx-gallery (>=0.10)", "sphinxcontrib-svg2pdfconverter", "sympy"] +test = ["codecov", "coverage", "flaky", "pytest", "pytest-cov"] + +[[package]] +name = "mongoengine" +version = "0.26.0" +description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mongoengine-0.26.0-py3-none-any.whl", hash = "sha256:020a0779d1830affc649f2760d8c408e998981f18898e425eb041915181d3a53"}, + {file = "mongoengine-0.26.0.tar.gz", hash = "sha256:3f284bdcbe8d1a3a9b8ab7d3c3ed672d10b8fd2e545447cd1d75e40d6e978332"}, +] + +[package.dependencies] +pymongo = ">=3.4,<5.0" + +[[package]] +name = "numpy" +version = "1.24.2" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d"}, + {file = "numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5"}, + {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253"}, + {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978"}, + {file = "numpy-1.24.2-cp310-cp310-win32.whl", hash = "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9"}, + {file = "numpy-1.24.2-cp310-cp310-win_amd64.whl", hash = "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0"}, + {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281"}, + {file = 
"numpy-1.24.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910"}, + {file = "numpy-1.24.2-cp311-cp311-win32.whl", hash = "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95"}, + {file = "numpy-1.24.2-cp311-cp311-win_amd64.whl", hash = "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5"}, + {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a"}, + {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96"}, + {file = "numpy-1.24.2-cp38-cp38-win32.whl", hash = "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d"}, + {file = "numpy-1.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780"}, + {file = "numpy-1.24.2-cp39-cp39-win32.whl", hash = 
"sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468"}, + {file = "numpy-1.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f"}, + {file = "numpy-1.24.2.tar.gz", hash = "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22"}, +] + +[[package]] +name = "pandas" +version = "1.5.3" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, + {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.1" +pytz = ">=2020.1" + +[package.extras] +test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] + +[[package]] +name = "pymongo" +version = "4.3.3" +description = "Python driver for MongoDB " +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"}, 
+ {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"}, + {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"}, + {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"}, + {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"}, + {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"}, + {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"}, + {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"}, + {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"}, + {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"}, + {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"}, + {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"}, + {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"}, + {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = 
"sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"}, + {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"}, + {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"}, + {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"}, + {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"}, + {file = 
"pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"}, + {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"}, + {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"}, + {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"}, + {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"}, + 
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"}, + {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"}, + {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"}, + {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"}, + {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"}, +] + +[package.dependencies] +dnspython = ">=1.16.0,<3.0.0" + +[package.extras] +aws = ["pymongo-auth-aws (<2.0.0)"] +encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"] +gssapi = ["pykerberos"] +ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +snappy = ["python-snappy"] +zstd = ["zstandard"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" 
+version = "2022.7.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, + {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, +] + +[[package]] +name = "scipy" +version = "1.10.1" +description = "Fundamental algorithms for scientific computing in Python" +category = "main" +optional = false +python-versions = "<3.12,>=3.8" +files = [ + {file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"}, + {file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"}, + {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"}, + {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"}, + {file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"}, + {file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"}, + {file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"}, + {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"}, + {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"}, + {file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"}, + {file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"}, + {file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"}, + {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"}, + {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"}, + {file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"}, + {file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"}, + {file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"}, + {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"}, + {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"}, + {file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"}, + {file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"}, +] + +[package.dependencies] +numpy = ">=1.19.5,<1.27.0" + +[package.extras] +dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", 
"typing_extensions"] +doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "uncertainties" +version = "3.1.7" +description = "Transparent calculations with uncertainties on the quantities involved (aka error propagation); fast calculation of derivatives" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "uncertainties-3.1.7-py2.py3-none-any.whl", hash = "sha256:4040ec64d298215531922a68fa1506dc6b1cb86cd7cca8eca848fcfe0f987151"}, + {file = "uncertainties-3.1.7.tar.gz", hash = "sha256:80111e0839f239c5b233cb4772017b483a0b7a1573a581b92ab7746a35e6faab"}, +] + +[package.dependencies] +future = "*" + +[package.extras] +all = ["nose", "numpy", "sphinx"] +docs = ["sphinx"] +optional = ["numpy"] +tests = ["nose", "numpy"] + +[metadata] +lock-version = "2.0" +python-versions = ">3.10,< 3.12" +content-hash = "aaad37b7d989f5285689b9e2192360da2b496be23cf41eb128e7e616e07a203e" diff --git a/pyproject._toml b/pyproject._toml new file mode 100644 index 0000000..dbf27e1 --- /dev/null +++ b/pyproject._toml @@ -0,0 +1,33 @@ +[build-system] +requires = ["flit_core>=3.4"] +build-backend = "flit_core.buildapi" + +[project] +name = "paveit" +version = "0.0.1" +authors = [ + { name="Example Author", email="author@example.com" }, +] +description = "A small example package" +#readme = "README.md" 
+requires-python = ">=3.9" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] + +#[project.urls] +#"Homepage" = "https://github.com/pypa/sampleproject" +#"Bug Tracker" = "https://github.com/pypa/sampleproject/issues" + +###### + + +#[tool.poetry.dependencies] +#python = ">3.10,< 3.12" +#lmfit = "~1.1.0" +#pandas = "~1.5.3" +#numpy = "~1.24.2" +#scipy = "~1.10.0" +#mongoengine = "~0.26.0" \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..30e2d44 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,30 @@ +[metadata] +name = paveit +description = Analysis Pavment Test Data +author = Markus Clauß +author_email = markus.clauss@tu-dresden.de + +[options] +packages = find: +python_requires = >=3.9 +setup_requires = setuptools_scm +install_requires = + lmfit + pandas + numpy + scipy + matplotlib + seaborn + mongoengine + +[options.packages.find] +where=src + +[rstcheck] +report=warning +ignore_substitutions=release +ignore_roles=scipydoc,numpydoc +ignore_directives=autoclass,autodoc,autofunction,automethod,jupyter-execute,math + +[flake8] +ignore = E121,E123,E126,E226,W503,W504,E501,E731 \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..45f160d --- /dev/null +++ b/setup.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python + +import setuptools + +if __name__ == "__main__": + setuptools.setup() \ No newline at end of file diff --git a/src/paveit/__init__.py b/src/paveit/__init__.py new file mode 100644 index 0000000..6f4fb52 --- /dev/null +++ b/src/paveit/__init__.py @@ -0,0 +1,4 @@ +# main __init__.py +from .analysis import * +from .helper import * +from .labtest import * diff --git a/src/paveit/analysis/__init__.py b/src/paveit/analysis/__init__.py new file mode 100644 index 0000000..41fca71 --- /dev/null +++ b/src/paveit/analysis/__init__.py @@ -0,0 +1,9 @@ +from .regression import * + +__all__ = [ + # 
# regression models (continuation of paveit/analysis/__init__.py __all__)
__all__ = [
    "fit_cos_simple",
    "fit_cos",
    # helper functions
    "fit_cos_eval",
]

# --- src/paveit/analysis/regression.py ---
import numpy as np
import scipy.special as sf
from scipy.optimize import curve_fit


def cosfunc(t, A, w, p, c, e):
    """Cosine model with a linear drift term.

    y(t) = A * cos(2*pi*w*t + p) + e*t + c

    t : time values
    A : amplitude
    w : frequency in Hz
    p : phase shift in rad
    c : constant offset
    e : linear drift (slope)
    """
    return A * np.cos(2 * np.pi * w * t + p) + e * t + c


def fit_cos_eval(x, par):
    """Evaluate a fitted cosine model at positions *x*.

    par : dict
        fitting results with keys 'amp', 'freq', 'phase', 'offset', 'slope'
        (as returned by fit_cos_simple / fit_cos)
    """
    return cosfunc(x, par['amp'], par['freq'], par['phase'], par['offset'],
                   par['slope'])


def regression_sine_fft():
    """Placeholder for an FFT-based sine regression.

    TODO: not implemented yet; returns an empty list so callers can detect
    "no result".
    """
    return []


def fit_cos_simple(x, y, freq=10.0):
    """Rough sine regression via bounded least squares.

    x, y : array-like
        sample positions (time) and measured values
    freq : float
        nominal test frequency; the fit is constrained to +/- 1 % of it

    Returns a dict with keys 'amp', 'freq', 'phase', 'offset', 'slope'.
    """
    tt = np.array(x)
    yy = np.array(y)

    guess_offset = np.mean(yy)
    offset_b = 0.4 * abs(guess_offset)

    # half the peak-to-peak range as amplitude start value
    guess_amp = abs(np.max(yy) - np.min(yy)) / 2.0

    # bounds: amplitude within [0.3, 1.3] * guess, frequency within 1 % of
    # the nominal frequency, offset within 40 % of the mean level
    param_bounds = (
        [0.3 * guess_amp, 0.99 * freq, -np.inf, guess_offset - offset_b, -np.inf],
        [1.3 * guess_amp, 1.01 * freq, np.inf, guess_offset + offset_b, np.inf],
    )

    popt, _ = curve_fit(cosfunc, tt, yy, bounds=param_bounds)
    A, w, p, c, e = popt

    return {
        "amp": A,
        "freq": w,
        "phase": p,
        "offset": c,
        "slope": e,
    }


def fit_cos(x, y, freq=10.0, constfreq=False):
    """Two-step sine regression: rough scipy fit, then lmfit refinement.

    x, y : array-like
        sample positions (time) and measured values
    freq : float
        nominal test frequency used as start value
    constfreq : bool
        if True the frequency is held fixed at *freq*

    Returns a dict with keys 'amp', 'freq', 'phase', 'offset', 'slope', 'r2'.
    """
    # lmfit is only needed for the refinement step; importing it lazily keeps
    # the module importable (and fit_cos_simple usable) without lmfit.
    import lmfit as lm

    # step 1: rough fit to obtain start values
    res_step1 = fit_cos_simple(x, y, freq=freq)

    # step 2: lmfit refinement with two minimizers; the best (by R^2) wins
    mod = lm.models.Model(cosfunc)

    mod.set_param_hint('A', value=res_step1['amp'])
    mod.set_param_hint('w', value=freq, vary=not constfreq)
    mod.set_param_hint('p', value=res_step1['phase'], vary=True)
    mod.set_param_hint('c', value=res_step1['offset'], vary=True)
    mod.set_param_hint('e', value=res_step1['slope'], vary=True)

    parms_fit = [
        mod.param_hints['A']['value'], mod.param_hints['w']['value'],
        mod.param_hints['p']['value'], mod.param_hints['c']['value'],
        mod.param_hints['e']['value']
    ]

    abweichung = []   # goodness-of-fit probability per method
    chis = []
    chis_red = []
    results = []
    r2 = []

    methods = ['leastsq', 'powell']
    dof = len(y) - len(parms_fit)   # degrees of freedom

    for method in methods:
        result = mod.fit(y, t=x, method=method, verbose=False)

        # coefficient of determination; clip negative values to 0
        r2temp = 1 - result.residual.var() / np.var(y)
        if r2temp < 0.:
            r2temp = 0
        r2.append(r2temp)

        chi = result.chisqr
        chis_red.append(result.redchi)
        abweichung.append(sf.gammaincc(dof / 2., chi / 2))
        chis.append(chi)
        results.append(result)

    best = np.nanargmax(r2)

    # NOTE: the original used f-strings without placeholders for these keys
    res = {
        'amp': results[best].best_values['A'],
        'freq': results[best].best_values['w'],
        'phase': results[best].best_values['p'],
        'offset': results[best].best_values['c'],
        'slope': results[best].best_values['e'],
        'r2': r2[best],
    }

    return res


# --- src/paveit/helper/__init__.py ---
try:
    from .filehasher import calc_hash_of_bytes
    from .minio import get_minio_client_archive, get_minio_client_processing
    __all__ = __all__ + [
        'get_minio_client_archive',
        'get_minio_client_processing',
        'calc_hash_of_bytes',
    ]
except ImportError:  # standalone use outside the package
    pass
# --- src/paveit/helper/filehasher.py continues in the next span ---
import hashlib
import os
from io import BytesIO


def calc_hash_of_bytes(buf: BytesIO) -> str:
    """Return the SHA-1 hex digest of *buf*, read incrementally.

    The stream is consumed from its current position; callers that need the
    stream again must seek(0) afterwards (see DataSineLoad).
    """
    algo = hashlib.sha1()

    # Read in 64 KiB chunks.  BUG FIX: the original multiplied this by a
    # further 1024 * 1024 (a ~64 TiB "chunk"), which pulled the whole stream
    # into memory at once and defeated the purpose of chunked hashing.
    buffer_size = 65536

    while True:
        chunk = buf.read(buffer_size)
        if not chunk:
            break
        algo.update(chunk)

    # ('hex' shadowed the builtin in the original; return directly instead)
    return algo.hexdigest()


# --- src/paveit/helper/minio.py ---

def _make_minio_client(url_var: str, access_var: str, secret_var: str,
                       bucket_name: str):
    """Build a Minio client from environment variables and ensure the bucket exists.

    url_var / access_var / secret_var name the environment variables that
    hold the endpoint and credentials.
    """
    # local import keeps the package importable when minio is not installed
    from minio import Minio

    client = Minio(
        os.environ[url_var],
        access_key=os.environ[access_var],
        secret_key=os.environ[secret_var],
        secure=False,  # NOTE(review): plain HTTP - confirm this is intended
    )

    if not client.bucket_exists(bucket_name):
        client.make_bucket(bucket_name)

    return client


def get_minio_client_processing(bucket_name='processing'):
    """Client for the processing S3 endpoint (MINIO_* environment variables)."""
    return _make_minio_client("MINIO_URL", "MINIO_ACCESS_KEY",
                              "MINIO_SECRET_KEY", bucket_name)


def get_minio_client_archive(bucket_name='archive'):
    """Client for the archive S3 endpoint (MINIO_ARCHIVE_* environment variables)."""
    return _make_minio_client("MINIO_ARCHIVE_URL", "MINIO_ARCHIVE_ACCESS_KEY",
                              "MINIO_ARCHIVE_SECRET_KEY", bucket_name)


# --- src/paveit/labtest/__init__.py ---
try:
    from .base import DataSineLoad
    from .citt import CITTBase
    __all__ = ['DataSineLoad', 'CITTBase']
except ImportError:  # standalone use outside the package
    pass
# --- src/paveit/labtest/base.py continues in the next span ---
# --- src/paveit/labtest/base.py ---
# coding: utf-8
import io

import pandas as pd
from paveit.helper import calc_hash_of_bytes, get_minio_client_processing

from worker import app, logger


class DataSineLoad:
    """Base class for lab tests driven by a sinusoidal load.

    Pipeline (see run()): fetch the raw file from S3, hash it, parse it into
    a DataFrame, run the analysis and archive the original file.
    """

    def __init__(self, filename: str, metadata: dict):
        # S3 object name inside the 'processing' bucket
        self.filename = filename
        self.metadata = metadata

        self._logger = logger
        self._logger.info(
            f'filename s3: {self.filename}, metadata: {self.metadata}')

    def _connect_to_s3(self):
        self._logger.info('connect to db')
        self.__minioClient = get_minio_client_processing()

    def _read_from_s3_to_bytesio(self):
        """Download the raw file into self.data (io.BytesIO)."""
        self._logger.info('read bytes')

        # BUG FIX: 'response' was only assigned inside the try block but was
        # unconditionally used in 'finally'; if _connect_to_s3/get_object
        # raised, the finally clause crashed with NameError and masked the
        # original exception.
        response = None
        try:
            self._connect_to_s3()
            response = self.__minioClient.get_object('processing',
                                                     self.filename)
            self.data = response.data
        finally:
            if response is not None:
                response.close()
                response.release_conn()

        self.data = io.BytesIO(self.data)

    def _calc_hash_of_bytesio(self):
        """Hash the downloaded bytes and rewind the stream for parsing."""
        self.filehash = calc_hash_of_bytes(self.data)
        self.data.seek(0)
        self._logger.debug(f'Hash of file: {self.filehash}')

    def _bytes_to_df(self):
        """Parse the raw bytes into self.df; subclasses override per format."""
        self._logger.debug('convert bytes to pandas.DataFrame')
        self.df = pd.read_csv(self.data, encoding='utf-8')

    def _calc(self):
        """Analysis hook; subclasses override.  Default: grand mean."""
        self._logger.debug('calc data')
        return self.df.mean().mean()

    def _archive_binary_data(self):
        """Hand the original file over to the archive worker queue."""
        self._logger.debug('send file to archive')
        app.send_task('ArchiveFile',
                      args=[self.filename, self.metadata, self.filehash,
                            'org', 'citt'],
                      queue='archive')

    def run(self):
        """Execute the full pipeline and return the analysis result."""
        self._logger.info('run task')
        self._read_from_s3_to_bytesio()
        self._calc_hash_of_bytesio()

        self._bytes_to_df()

        res = self._calc()
        self._logger.debug(f'results: {res}')

        self._archive_binary_data()

        return res


# --- src/paveit/labtest/citt.py ---
import io
import os
from csv import reader

import numpy as np
import pandas as pd
from paveit.labtest import DataSineLoad


class CITTBase(DataSineLoad):
    """CITT (cyclic indirect tensile test), generic variant."""

    def _calc(self):
        # grand mean and grand maximum over all channels
        return (self.df.mean().mean(), self.df.max().max())


class CITT_KIT(DataSineLoad):
    """CITT variant for the KIT device (';'-separated CSV with '*****' sections)."""

    def _calc(self):
        return (self.df.mean().mean(), self.df.max().max())

    def _bytes_to_df(self):
        # BUG FIX: the original called the module-global 'logger', which is
        # never imported in this module (NameError); use the instance logger.
        self._logger.debug('convert bytes to pandas.DataFrame')

        self.data.seek(0)
        with io.TextIOWrapper(self.data, encoding='latin-1') as read_obj:
            csv_reader = reader(read_obj, delimiter=';')

            read = False
            data = []
            temp = []

            # '*****' rows toggle measurement sections on and off
            for idx_row, row in enumerate(csv_reader):
                if row == ['*****']:
                    if read == False:
                        read = True
                    else:
                        read = False
                        data.append(temp)
                        temp = []
                    continue

                if read:
                    # decimal comma -> decimal point
                    row = [r.replace(',', '.') for r in row]
                    temp.append(row)

        # convert the sections to pandas
        res = []

        # fixed measurement programme of the device; the trailing second
        # 10 Hz block is dropped below
        freqs = [10.0, 5.0, 1.0, 0.1, 10.0]

        for idx_data, d in enumerate(data):
            t = pd.DataFrame(d[3:])
            t.columns = d[1]

            freq = freqs[idx_data]
            t['f'] = freq

            for col in t.columns:
                t[col] = pd.to_numeric(t[col])

            # assign a load-cycle number based on the time stamp
            dt = 1. / freq
            Nmax = int(np.ceil(t['ZEIT'].max() / dt))
            N = np.zeros_like(t['ZEIT'])

            for i in range(Nmax):
                if i == 0:
                    tmin = 0
                    tmax = dt
                else:
                    tmax = (i + 1) * dt
                    tmin = i * dt

                idx = t[(t['ZEIT'] >= tmin) & (t['ZEIT'] < tmax)].index
                N[idx] = i

            t['N'] = N
            res.append(t)

        # remove the repeated 10 Hz block
        res = pd.concat(res[:-1])

        # NOTE(review): self.temperature is not set anywhere in the class
        # hierarchy visible here - confirm a subclass or caller provides it.
        res['T'] = self.temperature

        self.df = res.reset_index()


class CITT_PTMDortmund(DataSineLoad):
    """CITT variant for the PTM Dortmund device (multi-sheet Excel export)."""

    def _calc(self):
        return (self.df.mean().mean(), self.df.max().max())

    def _bytes_to_df(self):
        res = []

        xl = pd.ExcelFile(self.data)
        num_sheets = len(xl.sheet_names)
        # was: print(num_sheets) - keep diagnostics on the logger
        self._logger.debug(f'number of sheets: {num_sheets}')

        diameter = []
        height = []

        for sheetid in range(num_sheets):
            temp = pd.read_excel(self.data, sheetid, skiprows=97)
            temp = temp.drop(index=0)

            # convert measurement data to numeric values
            for col in temp.columns:
                temp[col] = pd.to_numeric(temp[col])

            # metadata block at the top of each sheet
            meta = pd.read_excel(self.data, sheetid, skiprows=1, nrows=90)
            meta = meta[meta.columns[[0, 2]]]
            meta = meta.set_index(meta.columns[0]).to_dict()[meta.columns[1]]

            temp['sigma'] = float(meta['Max. Spannung'])
            temp['T'] = float(meta['Versuchstemperatur'])
            freq = float(meta['Frequenz'])
            dt = 1 / freq
            temp['f'] = freq

            Nfrom = int(meta['Erster Aufzeichnungslastwechsel'])
            Nto = int(meta['Letzer Aufzeichnungslastwechsel'])

            # add the cycle number to the dataframe
            time_idx = temp['Zeitfolgen'].values
            N = np.zeros_like(time_idx)
            self._logger.debug(len(N))
            self._logger.info(f'cycles from {Nfrom} to {Nto}')

            # BUG (data file): the recorded cycle range is wrong and will be
            # corrected by PTM; until then only the first 10 cycles are used.
            # for cycle in range(Nfrom, Nto + 1):
            for cycle in range(10):
                # time window of this cycle
                tmin = cycle * dt
                tmax = (cycle + 1) * dt

                # filter data of this cycle
                idx = temp[(time_idx >= tmin) & (time_idx < tmax)].index

                # FIX for the bug above: clamp to the recorded window.
                # (BUG FIX: the original `if any(idx) >= 500` compared a bool
                # with 500 and therefore never triggered)
                if (idx >= 500).any():
                    idx = idx[idx < 500]

                N[idx] = cycle

            temp['N'] = N

            # collect specimen geometry per sheet
            diameter.append(float(meta['Durchmesser (mm)']))
            height.append(float(meta['Länge (mm)']))

            res.append(temp)

        # concat all sheets into a single dataframe
        res = pd.concat(res)

        # fall back to the sheet geometry if the caller did not supply it
        if 'diameter' not in self.metadata:
            self.metadata['diameter'] = np.mean(diameter)
        if 'height' not in self.metadata:
            self.metadata['height'] = np.mean(height)

        self.df = res.reset_index()

        # BUG FIX: module-global 'logger' -> instance logger
        self._logger.debug(self.metadata)
        self._logger.debug(self.df.head())


# --- src/paveit/labtest/sheartest.py (continues in the next span) ---
import os

import lmfit as lm
import numpy as np
import pandas as pd
from paveit_worker.libs.labtests.base import DataSineLoad

#import scipy.fft as sfft
# (continuation of the commented-out legacy imports from pytestpavement)
# from pytestpavement.labtests.base import DataSineLoad

# BUG FIX: this module used plt, sns, fit_cos and fit_cos_eval throughout but
# never imported them (NameError on first use).
import matplotlib.pyplot as plt
import seaborn as sns

from paveit.analysis.regression import fit_cos, fit_cos_eval

# file_in_db()/save() reference these document models; restore the imports
# (they were commented out) but tolerate their absence so the analysis part
# of the module stays importable.
try:
    from pytestpavement.models.data import DataSheartest
    from pytestpavement.models.sheartest import DynamicShearTestExtension
except ImportError:  # NOTE(review): confirm the new home of these models
    DataSheartest = None
    DynamicShearTestExtension = None


class ShearTest(DataSineLoad):
    """Dynamic shear bonding test (sine load applied across a bonded joint)."""

    def __init__(self,
                 fname: str,
                 debug: bool = False,
                 gap_width: float = 1.0,
                 roundtemperature: bool = True,
                 archive_file=False,
                 s3_params: dict = {}):
        # NOTE(review): mutable default for s3_params is shared between
        # calls; it is never mutated here, so behaviour is unchanged, but
        # consider s3_params=None in a future API revision.

        # set parameters
        self.gap_width = gap_width  # joint gap in mm (base of shear strain)
        self.debug = debug
        self.file = fname
        self.roundtemperature = roundtemperature
        self.archive_file = archive_file
        self.s3_params = s3_params

        # process file (implemented in the base class)
        self._run()

    def plot_fited_data(self, opath=None, pkname=None, r2min=0.99):
        """Plot measured vs. fitted signals for poorly fitted blocks.

        Only blocks whose force or mean-shear-path fit has r2 < r2min are
        plotted (visual inspection of problem cases).  If both *opath* and
        *pkname* are given the figures are saved below
        <opath>/<pkname>/raw_data, otherwise they are shown interactively.
        """
        ylabel_dict = {
            'F': 'Kraft in N',
            's_vert_sum': 'norm. mittlerer Scherweg\n $S_{mittel}$ in mm',
            's_piston': 'norm. Kolbenweg\n in mm',
            's_vert_1': 'Scherweg\n $S_1$ in mm',
            's_vert_2': 'Scherweg\n $S_2$ in mm'
        }

        columns_analyse = [
            'F',
            's_vert_sum',
            's_vert_1',
            's_vert_2',
            's_piston',
        ]

        # BUG FIX: the original `if not (opath is None) & (pkname is None)`
        # enabled save-mode (showplot=False) when only ONE of the two was
        # given and then crashed in os.path.join(..., None).
        if opath is not None and pkname is not None:
            showplot = False
            opath = os.path.join(opath, pkname, 'raw_data')
            if not os.path.exists(opath):
                os.makedirs(opath)
        else:
            showplot = True

        for i, fit in self.fit.iterrows():

            # skip well-fitted blocks
            if not any([fit['r2_F'] < r2min, fit['r2_s_vert_sum'] < r2min]):
                continue

            data = self.data[int(fit['idx_data'])]
            if data is None:
                continue

            freq = data['f'].unique()[0]
            sigma = data['sigma_normal'].unique()[0]
            s = data['extension'].unique()[0]
            T = data['T'].unique()[0]

            fig, axs = plt.subplots(len(columns_analyse),
                                    1,
                                    figsize=(8, len(columns_analyse) * 2),
                                    sharex=True)

            for idxcol, col in enumerate(columns_analyse):
                x, y = data.index, data[col]

                # evaluate the stored regression parameters
                parfit = {
                    k: fit[f'fit_{k}_{col}']
                    for k in ['amp', 'freq', 'phase', 'offset', 'slope']
                }
                yreg = fit_cos_eval(x, parfit)

                if col in ['s_piston', 's_vert_sum']:
                    # drift-corrected channels: compare shapes, not offsets
                    y = y - np.mean(y)
                    yreg = yreg - np.mean(yreg)

                plt.sca(axs[idxcol])
                plt.plot(x, y, label='Messdaten')

                r2 = np.round(fit[f'r2_{col}'], 3)
                plt.plot(x,
                         yreg,
                         alpha=0.7,
                         label=f'Regression ($R^2 = {r2}$)')

                if not ('F' in col):
                    # mark the admissible shear-path band
                    s = fit['extension']
                    parline = dict(lw=0.4,
                                   ls='--',
                                   color='lightgrey',
                                   alpha=0.4,
                                   label='Bereich des zul. Scherweges')
                    plt.axhspan(-s, s, **parline)

                if idxcol == len(columns_analyse) - 1:
                    plt.xlabel('Zeit in s')

                plt.ylabel(ylabel_dict[col])
                plt.legend()

            plt.tight_layout()

            if showplot:
                plt.show()
                break
            else:
                ofile = f'{T}deg_{sigma}MPa_{freq}Hz_{s}mm'.replace('.', 'x')
                ofile = os.path.join(opath, ofile + '.pdf')
                plt.savefig(ofile)
                plt.close()


class ShearTestExtension(ShearTest):
    """Extension-controlled shear test (sweeps over target shear extensions)."""

    def runfit(self):
        self._fit_data()

    def file_in_db(self):
        """Return True if a result for this file hash is already stored."""
        n = DynamicShearTestExtension.objects(filehash=self.filehash).count()
        return n > 0

    def save(self, material1, material2, bounding, meta: dict):
        """Persist fit results plus raw data; optionally archive the file."""
        for i, fit in self.fit.iterrows():

            data = self.data[int(fit['idx_data'])]

            # skip combinations that are already in the database
            n = DynamicShearTestExtension.objects(
                f=fit['f'],
                sigma_normal=fit['sigma_normal'],
                T=fit['T'],
                extension=fit['extension'],
                material1=material1,
                material2=material2,
                bounding=bounding,
                filehash=self.filehash,
            ).count()
            if n > 0:
                continue

            # collect regression parameters for every analysed channel
            values = {}
            for col in ['F', 's_vert_1', 's_vert_2', 's_vert_sum']:
                for par in ['amp', 'freq', 'phase', 'offset', 'slope']:
                    values[f'fit_{par}_{col}'] = fit[f'fit_{par}_{col}']
                values[f'r2_{col}'] = fit[f'r2_{col}']

            values.update(meta)

            r = DynamicShearTestExtension(
                # metadata
                f=fit['f'],
                sigma_normal=fit['sigma_normal'],
                T=fit['T'],
                extension=fit['extension'],
                filehash=self.filehash,
                material1=material1,
                material2=material2,
                bounding=bounding,
                # results
                stiffness=fit['G'],
                **values).save()

            try:
                # save raw data
                DataSheartest(
                    result_id=r.id,
                    time=data.index.values,
                    F=data['F'].values,
                    N=data['N'].values,
                    s_vert_1=data['s_vert_1'].values,
                    s_vert_2=data['s_vert_2'].values,
                    s_vert_sum=data['s_vert_sum'].values,
                    s_piston=data['s_piston'].values,
                ).save()
            except Exception:
                # BUG FIX: the original re-raised BEFORE its cleanup call
                # (unreachable `rdata.delete()` after `raise`) and targeted
                # the unsaved raw-data document.  Remove the orphaned result
                # record instead, then propagate.
                print('error saving data')
                r.delete()
                raise

        if self.archive_file:
            # NOTE(review): MinioClient is not imported in this module -
            # confirm its home before enabling archive_file.
            mclient = MinioClient(self.s3_params['S3_URL'],
                                  self.s3_params['S3_ACCESS_KEY'],
                                  self.s3_params['S3_SECRET_KEY'],
                                  bucket=str(meta['org_id']))

            extension = os.path.splitext(self.file)[-1]
            ofilename = self.filehash + extension
            outpath = 'sheartest'

            metadata_s3 = {
                'project_id': str(meta['project_id']),
                'user_id': str(meta['user_id']),
                'filename': os.path.split(self.file)[-1],
                'speciment': meta['speciment_name']
            }

            mclient.compress_and_upload_file(self.file,
                                             ofilename,
                                             outpath=outpath,
                                             content_type="application/raw",
                                             metadata=metadata_s3)

    def _set_parameter(self):
        """Configuration consumed by the base-class pipeline."""
        # columns whose unique value combinations split the file into blocks
        self.split_data_based_on_parameter = [
            'T', 'sigma_normal', 'f', 'extension'
        ]

        self.col_as_int = ['N']
        self.col_as_float = ['T', 'F', 'f', 's_vert_sum']

        # required columns / header fields after standardization
        self.val_col_names = ['time', 'T', 'f', 'N', 'F', 's_vert_sum']
        self.val_header_names = ['speciment_diameter']

        self.columns_analyse = [
            'F', 's_vert_sum', 's_vert_1', 's_vert_2', 's_piston'
        ]

        self.number_of_load_cycles_for_analysis = 5

    def _calc_missiong_values(self):
        """Derive the mean vertical shear path from the two LVDT channels.

        NOTE(review): the name keeps the historic typo ('missiong'); the base
        class outside this view calls it by this name, so it is not renamed.
        """
        cols = self.data.columns
        for c in ['vert']:
            if f's_{c}_sum' not in cols:
                self.data[f's_{c}_sum'] = (
                    self.data[[f's_{c}_1', f's_{c}_2']].sum(axis=1).div(2.0))

    def _fit_data(self):
        """Fit the cosine model to every measurement block and compute G."""
        self.fit = []

        for idx_data, data in enumerate(self.data):

            if data is None:
                continue

            # time axis relative to block start
            data.index = data.index - data.index[0]

            res = {'idx_data': int(idx_data)}

            freq = float(np.round(data['f'].mean(), 4))

            for col in self.columns_analyse:

                if col not in data.columns:
                    continue

                x = data.index.values
                y = data[col].values

                # frequency is held fixed at the nominal test frequency
                res_fit = fit_cos(x, y, freq=freq, constfreq=True)

                res[f'r2_{col}'] = res_fit['r2']
                for par in ['amp', 'freq', 'phase', 'offset', 'slope']:
                    res[f'fit_{par}_{col}'] = res_fit[par]

            # shear stiffness: stress amplitude / strain amplitude
            deltaF = res['fit_amp_F']
            deltaS = res['fit_amp_s_vert_sum']

            A = np.pi * self.meta['speciment_diameter']**2 / 4
            tau = deltaF / A                  # shear stress amplitude
            gamma = deltaS / self.gap_width   # shear strain amplitude

            res['G'] = tau / gamma

            # carry the test conditions over to the result row
            for c in ['T', 'extension', 'sigma_normal', 'f']:
                res[c] = data[c].iloc[0]

            self.fit.append(res)

            if self.debug and len(self.fit) > 5:
                break

        self.fit = pd.DataFrame.from_records(self.fit)

    def plot_results(self, opath=None, pkname=None, r2min=0.96):
        """Scatter-plot shear stiffness vs. measured shear-path amplitude."""
        # BUG FIX: precedence of `not ... & ...` (see plot_fited_data)
        if opath is not None and pkname is not None:
            showplot = False
            opath = os.path.join(opath, pkname)
            if not os.path.exists(opath):
                os.makedirs(opath)
        else:
            showplot = True

        dfplot = self.fit.copy()
        # mm -> micrometre for plotting
        for col in ['extension', 'fit_amp_s_vert_sum']:
            dfplot[col] = dfplot[col].mul(1000)

        fig, ax = plt.subplots()

        xticks = list(dfplot['extension'].unique())

        # well-fitted points, coloured by temperature
        df = dfplot[(dfplot['r2_F'] >= r2min)
                    & (dfplot['r2_s_vert_sum'] >= r2min)]
        sns.scatterplot(
            data=df,
            x='fit_amp_s_vert_sum',
            y='G',
            hue='T',
            ax=ax,
            alpha=0.7,
            size="G",
            sizes=(50, 160),
            edgecolor='k',
            palette='muted',
            zorder=10)

        # poorly fitted points in grey
        df = dfplot[(dfplot['r2_F'] < r2min)
                    & (dfplot['r2_s_vert_sum'] < r2min)]
        if not df.empty:
            sns.scatterplot(data=df,
                            x='fit_amp_s_vert_sum',
                            y='G',
                            facecolor='grey',
                            alpha=0.5,
                            legend=False,
                            zorder=1,
                            ax=ax)

        ax.set_xlabel(r'gemessene Scherwegamplitude in $\mu m$')
        ax.set_ylabel(r'Scherseteifigkeit in MPa/mm')

        ax.set_xticks(xticks)
        ax.grid()

        if not showplot:
            ofile = os.path.join(opath, 'shearstiffness.pdf')
            plt.savefig(ofile)
            plt.show()

    def plot_stats(self, opath=None, pkname=None, r2min=0.96):
        """Plot goodness-of-fit (R^2) of the force and mean-shear-path fits."""
        # BUG FIX: precedence of `not ... & ...` (see plot_fited_data)
        if opath is not None and pkname is not None:
            showplot = False
            opath = os.path.join(opath, pkname)
            if not os.path.exists(opath):
                os.makedirs(opath)
        else:
            showplot = True

        dfplot = self.fit.copy()
        for col in ['extension', 'fit_amp_s_vert_sum']:
            dfplot[col] = dfplot[col].mul(1000)

        df = self.fit

        fig, axs = plt.subplots(1, 2, sharey=True, sharex=True)

        parscatter = dict(palette='muted', alpha=0.7, edgecolor='k', lw=0.3)

        # R^2 of the force fit
        ax = axs[0]
        sns.scatterplot(data=df,
                        x='fit_amp_s_vert_sum',
                        y='r2_F',
                        hue='T',
                        ax=ax,
                        **parscatter)
        ax.set_ylabel('Bestimmtheitsmaß $R^2$')
        ax.set_title('Kraft')

        # R^2 of the mean-shear-path fit
        ax = axs[1]
        sns.scatterplot(data=df,
                        x='fit_amp_s_vert_sum',
                        y='r2_s_vert_sum',
                        hue='T',
                        legend=False,
                        ax=ax,
                        **parscatter)
        ax.set_ylabel('$R^2$ (S_{mittel})')
        ax.set_title('mittlerer Scherweg')

        for ax in axs.flatten():
            ax.grid()
            ax.set_xlabel(r'gemessene Scherwegamplitude in $\mu m$')

        plt.tight_layout()

        if not showplot:
            ofile = os.path.join(opath, 'stats_r2.pdf')
            plt.savefig(ofile)
            plt.show()


class ShearTestExtensionLaborHart(ShearTestExtension):
    """Reader for the Labor Hart export format (';'-CSV with metadata header)."""

    def _define_units(self):
        # conversion factors from file units to N and s
        self.unit_F = 1 / 1000.0  # N
        self.unit_t = 1 / 1000.   # s

    def _set_units(self):
        # NOTE(review): the force conversion is deliberately disabled in the
        # original - confirm whether the data is already in N.
        #for col in ['F']:
        #    self.data[col] = self.data[col].mul(self.unit_F)

        for col in ['time']:
            self.data[col] = self.data[col].mul(self.unit_t)

        return True

    def _read_data(self):
        """Read data from Labor Hart (metadata header + ';' data table)."""
        # parameters of the format
        encoding = 'latin-1'
        skiprows = 14
        splitsign = ':;'

        # metadata from the file header
        meta = {}
        with open(self.file, 'r', encoding=encoding) as f:
            count = 0
            for line in f:
                count += 1

                # remove whitespace and split "key:;value" pairs
                linesplit = line.strip().split(splitsign)
                if len(linesplit) == 2:
                    meta[linesplit[0]] = linesplit[1]

                if count >= skiprows:
                    break

        # measurement table
        data = pd.read_csv(self.file,
                           encoding=encoding,
                           skiprows=skiprows,
                           decimal=',',
                           sep=';')

        # re-read the header line to name the columns
        with open(self.file, 'r', encoding=encoding) as f:
            count = 0
            for line in f:
                count += 1
                if count >= skiprows:
                    break

        head = line.split(';')
        # NOTE(review): the last entry keeps its trailing newline; columns
        # are renamed in _standardize_data, so this is currently harmless.
        data.columns = head

        # drop all-NaN columns
        data = data.dropna(axis=1)

        self.meta = meta
        self.data = data
        return True

    def _standardize_meta(self):
        """Map the German header keys to the standard metadata names."""
        keys = list(self.meta.keys())
        for key in keys:

            if any(map(key.__contains__, ['Probenbezeichnung'])):
                self.meta['speciment'] = self.meta.pop(key)

            elif any(map(key.__contains__, ['Datum/Uhrzeit'])):
                self.meta['datetime'] = self.meta.pop(key)
                try:
                    self.meta['datetime'] = pd.to_datetime(
                        self.meta['datetime'])
                except:
                    # keep the raw string if the format is unexpected
                    pass

            elif any(map(key.__contains__, ['Probenhöhe'])):
                self.meta['speciment_height'] = float(
                    self.meta.pop(key).replace(',', '.'))
            elif any(map(key.__contains__, ['Probendurchmesser'])):
                self.meta['speciment_diameter'] = float(
                    self.meta.pop(key).replace(',', '.'))
            elif any(map(key.__contains__, ['Solltemperatur'])):
                self.meta['temperature'] = float(
                    self.meta.pop(key).replace(',', '.'))
            elif any(map(key.__contains__, ['Prüfbedingungen'])):
                self.meta['test_version'] = self.meta.pop(key)
            elif any(map(key.__contains__, ['Name des VersAblf'])):
                self.meta['test'] = self.meta.pop(key)
            elif any(map(key.__contains__, ['Prüfer'])):
                self.meta['examiner'] = self.meta.pop(key)

        return True

    def _standardize_data(self):
        """Map the device column names to the standard channel names."""
        colnames = list(self.data.columns)

        for i, col in enumerate(colnames):
            if col == 'TIME':
                colnames[i] = 'time'

            # set values
            elif col == 'Sollwert Frequenz':
                colnames[i] = 'f'
            elif col == 'SollTemperatur':
                colnames[i] = 'T'
            elif col == 'Max Scherweg':
                colnames[i] = 'extension'
            elif col == 'Sollwert Normalspannung':
                colnames[i] = 'sigma_normal'
            elif col == 'Impulsnummer':
                colnames[i] = 'N'

            # measurements
            elif col == 'Load':
                colnames[i] = 'F'
            elif col == 'Position':
                colnames[i] = 's_piston'

            elif col == 'VERTIKAL Links':
                colnames[i] = 's_vert_1'
            elif col == 'VERTIKAL Rechts':
                colnames[i] = 's_vert_2'

            elif col == 'HORIZONTAL links':
                colnames[i] = 's_hor_1'
            elif col == 'HOIZONTAL Rechts':
                colnames[i] = 's_hor_2'

        self.data.columns = colnames


class ShearTestExtensionTUDresdenGeosys(ShearTestExtension):
    """Reader for the TU Dresden / Geosys export format."""

    def _define_units(self):
        # displacement channels: raw units -> mm
        self.unit_S = 1 / 1000.0

    def _set_units(self):
        for col in [
                's_vert_sum', 's_vert_1', 's_vert_2', 's_piston', 'extension'
        ]:
            self.data[col] = self.data[col].mul(self.unit_S)

        # machine-internal load-channel values -> MPa; factor averaged from
        # three calibration points
        f = np.mean([0.9 / 355, 0.6 / 234.0, 0.3 / 116.0])
        self.data['sigma_normal'] = self.data['sigma_normal'].mul(f).apply(
            lambda x: np.round(x, 1))

        return True

    def _read_data(self):
        """Read data via the Geosys reader."""
        # NOTE(review): read_geosys is not imported in this module (NameError
        # at runtime) - confirm its home (pytestpavement?) and add the import.
        head, data = read_geosys(self.file, '015')

        self.meta = head
        self.data = data
        return True

    def _standardize_meta(self):
        """Map the Geosys header keys to the standard metadata names."""
        keys = list(self.meta.keys())
        for key in keys:
            if key == 'd':
                self.meta['speciment_diameter'] = self.meta.pop(key)

        return True

    def _standardize_data(self):
        """Map the Geosys column names to the standard channel names."""
        colnames = list(self.data.columns)

        for i, col in enumerate(colnames):

            # set values
            if col == 'soll temperature':
                colnames[i] = 'T'
            elif col == 'soll extension':
                colnames[i] = 'extension'
            elif col == 'soll sigma':
                colnames[i] = 'sigma_normal'
            elif col == 'soll frequency':
                colnames[i] = 'f'

            elif col == 'Number of vertical cycles':
                colnames[i] = 'N'

            # measurements
            elif col == 'vertical load from hydraulic pressure':
                colnames[i] = 'F'
            elif col == 'vertical position from hydraulic pressure':
                colnames[i] = 's_piston'

            elif col == 'Vertical position from LVDT 1':
                colnames[i] = 's_vert_1'
            elif col == 'Vertical position from LVDT 2':
                colnames[i] = 's_vert_2'

        self.data.columns = colnames