Compare commits

81 Commits

Author SHA1 Message Date
75ca5bdd1c add archive as Paramter to Materials, optimize MaterialProperties 2023-09-11 20:21:57 +02:00
394d86c111 add Classes for Bitumen, Asphat, Aggregates, Additives and Properties of them 2023-09-11 17:35:41 +02:00
4b263f461a restructure materials, add additives 2023-09-06 14:50:40 +02:00
Markus Clauß
dd56a016c4 fix some import errors, step_before/after now single value no list 2023-07-03 09:01:45 +02:00
f482a72ca5 move function calc_E to functions/citt 2023-07-03 08:31:30 +02:00
28255116f2 change install to pip 517 2023-07-02 22:42:29 +02:00
c3effaecaf Merge branch 'develop' of git.paveit.de:paveit/lib-paveit into develop 2023-07-02 22:37:34 +02:00
5d2bff2c8d add first labwork tasks for flow 2023-07-02 22:37:33 +02:00
Markus Clauß
3b6eda29d5 Fatigue hinzugefügt, DSV angefangen 2023-06-30 14:12:37 +02:00
109df5bef1 remove test step in drone 2023-06-30 08:49:30 +02:00
23c6afd813 fix drone 2023-06-30 08:45:28 +02:00
a3be7bd0f4 update drone.yml 2023-06-30 08:42:30 +02:00
e6286cc799 add Env. Variables to .drone.yml 2023-06-30 08:40:47 +02:00
1b069a1528 add bugreport and messages 2023-06-28 22:12:36 +02:00
423ad74d23 remove print 2023-06-28 09:03:13 +02:00
04f5845ef1 add email and login date to datamodel 2023-06-28 09:02:58 +02:00
20a439d772 switch to keycload, add regression for citt 2023-06-27 11:08:25 +02:00
cd9bd08863 add regression citt, add new datafields to citt 2023-06-16 09:43:33 +02:00
959358e38f fix speciment name 2023-06-09 10:16:18 +02:00
ca25dcb71a alle Datenmodelle für Asphalte ergänzt 2023-06-07 15:08:36 +02:00
110400b7c1 add Category Enumeration for Asphalts, Remove category from Limits of Asphalt, add two new classes 2023-06-07 14:43:04 +02:00
f5e3a5e64e fix 2023-06-07 14:15:46 +02:00
60fd37488b add none as default 2023-06-07 14:14:59 +02:00
b694d89c0f fix letter in description 2023-06-07 14:13:04 +02:00
aac7dd729e Material PmB hinzugefügt 2023-06-07 14:03:36 +02:00
71e5aa864e Auswertung CITT Geosys One TU Dresden 2023-06-07 14:02:19 +02:00
Markus Clauß
f84d28a03c verwende syntethische Frequenz anstelle der falsch eingebetteten 2023-06-05 21:45:56 +02:00
Markus Clauß
ae5d918ee4 Ergänze Frequenz bei CITT TU Dresden aus Auswahlmenü 2023-06-05 21:44:45 +02:00
c991cf6bc0 remove raise 2023-06-05 20:45:57 +02:00
Markus Clauß
3344e8d27a fix read data from geosys 2023-06-05 20:42:24 +02:00
19dfee6e24 remove org_id, fix error 2023-06-02 09:27:51 +02:00
09d264969d fix 2023-06-02 09:22:17 +02:00
f0ac4b999d add custom function to get config from machine 2023-06-02 09:21:23 +02:00
a27c94eb7c add more requests 2023-06-02 08:53:44 +02:00
4fe2c5f9fa remove infrastructure.py 2023-06-02 08:52:34 +02:00
684449d693 restructure machine data 2023-06-02 08:51:27 +02:00
591fced274 add date of manufacture to machines, small fixes 2023-05-26 11:20:38 +02:00
b30286387d add machine and calibration data 2023-05-26 08:26:00 +02:00
91412f7169 change definition of Asphalt as material 2023-05-25 10:11:16 +02:00
80c27604bb add list to transform objects 2023-05-24 16:26:21 +02:00
ba6291f393 mod enum for asphalts 2023-05-24 16:10:41 +02:00
0d265793da add range to sievepassage 2023-05-24 16:07:52 +02:00
91015cc2f2 Asphalttragschicht an TL-Asphalt 2023-05-24 15:56:25 +02:00
387d60eff2 helper recursice data fetch 2023-05-24 13:17:00 +02:00
38a083cdc3 add function to merge data and return as dict 2023-05-23 17:38:58 +02:00
549d9aec61 fix: remove missing import 2023-05-23 17:01:44 +02:00
b7d09737c7 change enumNorm to norm from db 2023-05-23 16:58:24 +02:00
c012248aa9 add Dokuemnt for Asphalt, mod norm_spezifications 2023-05-23 16:45:20 +02:00
f188dbc974 add norm for bitumen 2023-05-23 16:41:21 +02:00
2cdc049031 rename class 2023-05-23 16:35:19 +02:00
cff9285736 remove kind, spil class in subclasses 2023-05-23 16:34:55 +02:00
98bd72559f fix typo 2023-05-23 16:30:50 +02:00
47e2851526 month not required 2023-05-23 16:30:09 +02:00
0c8b78b5d0 fix duplicates 2023-05-23 16:27:36 +02:00
92d6b82502 Merge branch 'develop' of git.paveit.de:paveit/lib-paveit into develop 2023-05-23 16:24:22 +02:00
9361e1ce95 add Dataclass for norm documents 2023-05-23 16:24:19 +02:00
Markus Clauß
63599b7984 move BitumenParameter to data, add function to read all parameters 2023-05-23 15:47:50 +02:00
2bf4d1b6ea define supply_source as required, restructure 2023-05-23 13:58:03 +02:00
381a5542b1 add extra data to Aggregate 2023-05-23 13:55:58 +02:00
2f6ffc3baa change norm in Aggreage 2023-05-23 13:53:49 +02:00
45a87569d0 fix typo 2023-05-23 13:46:41 +02:00
12013936c2 change norm value from string to enum 2023-05-23 13:44:47 +02:00
9ade447c2d add more parameter to bitumen 2023-05-23 10:37:47 +02:00
d08d0d247c fix 2023-05-23 10:28:45 +02:00
4e5b3aa212 shift parameter and definition of Bitumen to material.py 2023-05-23 10:26:21 +02:00
9562ecabfa add norm to bitumen parameter and limits 2023-05-23 10:18:13 +02:00
6aedb1d1fd Norm und Name ergänzt 2023-05-23 10:03:19 +02:00
c6bfe0353c Klassen für Straßenbaubitumen und Gesteinskörnung hinzugefügt 2023-05-23 09:47:17 +02:00
2d319f7dcc add missing packages to installscript 2023-05-17 08:37:55 +02:00
b19a072819 remove cache 2023-05-17 08:36:14 +02:00
17ef9a5247 add pytest to pipeline 2023-05-17 08:34:52 +02:00
2181aa6732 add Drone Pipeline 2023-05-17 08:32:41 +02:00
a7c6640f36 add Drone Pipeline 2023-05-17 08:28:51 +02:00
fbff3734a2 change Datamodels 2023-05-17 08:18:55 +02:00
Markus Clauß
471fa8dabf Datenstrukturen und Auswertung angepasst 2023-03-17 14:54:19 +01:00
Markus Clauß
ecaf97bbb3 Schersteifigkeit für TUD und Labor Hart ergänzt. Auswertung läuft 2023-03-03 15:03:23 +01:00
Markus Clauß
e1dd4c7c00 Auswertung Schersteifigkeit Labor Hart ergänzt 2023-03-03 12:53:03 +01:00
Markus Clauß
1bbb560f31 Datenmodelle angepasst, einige Firmen in CITT übernommen 2023-03-02 17:31:39 +01:00
Markus Clauß
e5c9f6904c CITT Dortmund läuft, Tests hinzugefügt 2023-02-28 16:11:55 +01:00
Markus Clauß
e861dbf10e Base Model für CITT erstellt, PTM Dortmund ergänzt, Tests hinzugefügt 2023-02-28 13:56:11 +01:00
Markus Clauß
b248a7e9b1 clean folder 2023-02-27 17:09:19 +01:00
61 changed files with 5299 additions and 1144 deletions

16
.drone.yml Normal file
View File

@@ -0,0 +1,16 @@
kind: pipeline
type: docker
name: lib-paveit
environment:
MONGO_USER: ''
MONGO_PASSWD: ''
MONGO_URI: ''
steps:
- name: test
image: python:3.11-buster
commands:
- pip install --no-cache-dir .
- pip install --no-cache-dir pytest

180
.gitignore vendored Executable file
View File

@@ -0,0 +1,180 @@
temp
.DS_Store
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# ---> VisualStudioCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix

5
Makefile Normal file → Executable file
View File

@@ -1,2 +1,5 @@
link:
pip install -e ./
test:
pytest -v -o log_cli=true --log-cli-level=INFO tests

0
README.md Normal file → Executable file
View File

4
debug.csv Executable file
View File

@@ -0,0 +1,4 @@
,fit_F_amp,fit_F_freq,fit_F_phase,fit_F_offset,fit_F_slope,fit_F_r2,fit_F_max,fit_F_min,f,sigma,fit_s_hor_sum_amp,fit_s_hor_sum_freq,fit_s_hor_sum_phase,fit_s_hor_sum_offset,fit_s_hor_sum_slope,fit_s_hor_sum_r2,fit_s_hor_sum_max,fit_s_hor_sum_min,fit_s_hor_1_amp,fit_s_hor_1_freq,fit_s_hor_1_phase,fit_s_hor_1_offset,fit_s_hor_1_slope,fit_s_hor_1_r2,fit_s_hor_1_max,fit_s_hor_1_min,fit_s_hor_2_amp,fit_s_hor_2_freq,fit_s_hor_2_phase,fit_s_hor_2_offset,fit_s_hor_2_slope,fit_s_hor_2_r2,fit_s_hor_2_max,fit_s_hor_2_min,nu,E
0,1162.037522728264,0.09999816445250176,3.2731742438169205,1657.4959341169797,0.022890975975805593,0.9999709812370754,2822.8786686693848,498.4860405788809,0.1,0.2,0.004904662057765795,0.09994473426198426,3.274570732678786,0.004472897149678457,3.4796345898322193e-06,0.9995438125784065,0.009632119781608398,-0.00042915385165576136,0.0022048443407161134,0.0999473113711256,3.2789165848392394,0.002036487114427019,1.317283541472095e-06,0.9992245191638016,0.0043773692868893654,-0.00022888205421645047,0.0026998634649033275,0.0999425971739857,3.271026693390654,0.00243640933189622,2.1623427295265008e-06,0.9993713553565571,0.005254750494719032,-0.0002479555587344695,0.2983926664681502,2260.236445571626
1,1163.9861551163267,0.29999672326752724,3.271466866301432,1657.5773060905333,0.023592068619978698,0.999977491807627,2827.1702071859427,492.85935674606014,0.30003,0.2,0.004904630239776472,0.30002953724325576,3.261420279897325,0.004476978416102744,2.2128929628375675e-05,0.9997651921759285,0.009765634313234614,-0.0004482273561737665,0.0021960586065051407,0.300085988714776,3.2617587973425652,0.0020390391186955238,8.035203621628222e-06,0.9992996273163816,0.004420284672054908,-0.0002098085496983204,0.0027085993503841803,0.29998369085814713,3.2611491963027257,0.002437939646841411,1.4093566880537998e-05,0.9995179610005985,0.005354886393438715,-0.0002384188064754461,0.2983926664681502,2264.0413462626584
2,1173.2940951101361,3.0019781539143713,3.1127799064755783,1652.6775323274487,2.2793532011736803,0.9997118511163391,2828.2192499344346,494.76670719786375,3.003,0.2,0.004927618845400971,3.0012837674744888,3.1051127487990566,0.004715737141843021,-1.2305236334063097e-05,0.998488708969846,0.009899148844860886,-0.0004005435948787328,0.0022065238872148044,3.0014146858816817,3.110359353742398,0.0021183309358349563,-8.842607057128579e-06,0.9965020191798836,0.004558567579810768,-0.00018119829292129186,0.002721172122260612,3.0011630113467382,3.100932209486545,0.00259739494570079,-3.4648940648246214e-06,0.9979287207765057,0.0054359487876403795,-0.000257492310993479,0.2983926664681502,2271.499199111919
1 fit_F_amp fit_F_freq fit_F_phase fit_F_offset fit_F_slope fit_F_r2 fit_F_max fit_F_min f sigma fit_s_hor_sum_amp fit_s_hor_sum_freq fit_s_hor_sum_phase fit_s_hor_sum_offset fit_s_hor_sum_slope fit_s_hor_sum_r2 fit_s_hor_sum_max fit_s_hor_sum_min fit_s_hor_1_amp fit_s_hor_1_freq fit_s_hor_1_phase fit_s_hor_1_offset fit_s_hor_1_slope fit_s_hor_1_r2 fit_s_hor_1_max fit_s_hor_1_min fit_s_hor_2_amp fit_s_hor_2_freq fit_s_hor_2_phase fit_s_hor_2_offset fit_s_hor_2_slope fit_s_hor_2_r2 fit_s_hor_2_max fit_s_hor_2_min nu E
2 0 1162.037522728264 0.09999816445250176 3.2731742438169205 1657.4959341169797 0.022890975975805593 0.9999709812370754 2822.8786686693848 498.4860405788809 0.1 0.2 0.004904662057765795 0.09994473426198426 3.274570732678786 0.004472897149678457 3.4796345898322193e-06 0.9995438125784065 0.009632119781608398 -0.00042915385165576136 0.0022048443407161134 0.0999473113711256 3.2789165848392394 0.002036487114427019 1.317283541472095e-06 0.9992245191638016 0.0043773692868893654 -0.00022888205421645047 0.0026998634649033275 0.0999425971739857 3.271026693390654 0.00243640933189622 2.1623427295265008e-06 0.9993713553565571 0.005254750494719032 -0.0002479555587344695 0.2983926664681502 2260.236445571626
3 1 1163.9861551163267 0.29999672326752724 3.271466866301432 1657.5773060905333 0.023592068619978698 0.999977491807627 2827.1702071859427 492.85935674606014 0.30003 0.2 0.004904630239776472 0.30002953724325576 3.261420279897325 0.004476978416102744 2.2128929628375675e-05 0.9997651921759285 0.009765634313234614 -0.0004482273561737665 0.0021960586065051407 0.300085988714776 3.2617587973425652 0.0020390391186955238 8.035203621628222e-06 0.9992996273163816 0.004420284672054908 -0.0002098085496983204 0.0027085993503841803 0.29998369085814713 3.2611491963027257 0.002437939646841411 1.4093566880537998e-05 0.9995179610005985 0.005354886393438715 -0.0002384188064754461 0.2983926664681502 2264.0413462626584
4 2 1173.2940951101361 3.0019781539143713 3.1127799064755783 1652.6775323274487 2.2793532011736803 0.9997118511163391 2828.2192499344346 494.76670719786375 3.003 0.2 0.004927618845400971 3.0012837674744888 3.1051127487990566 0.004715737141843021 -1.2305236334063097e-05 0.998488708969846 0.009899148844860886 -0.0004005435948787328 0.0022065238872148044 3.0014146858816817 3.110359353742398 0.0021183309358349563 -8.842607057128579e-06 0.9965020191798836 0.004558567579810768 -0.00018119829292129186 0.002721172122260612 3.0011630113467382 3.100932209486545 0.00259739494570079 -3.4648940648246214e-06 0.9979287207765057 0.0054359487876403795 -0.000257492310993479 0.2983926664681502 2271.499199111919

375
poetry.lock generated
View File

@@ -1,375 +0,0 @@
# This file is automatically @generated by Poetry and should not be changed by hand.
[[package]]
name = "asteval"
version = "0.9.29"
description = "Safe, minimalistic evaluator of python expression using ast module"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "asteval-0.9.29-py3-none-any.whl", hash = "sha256:134e42fc4790582f2f926999e59abb444fb491046ba396836962268aad8a68a5"},
{file = "asteval-0.9.29.tar.gz", hash = "sha256:ab98c61ba9394149c774ae7861497e9c32580301aa693ca19746997216c31fab"},
]
[package.extras]
all = ["Sphinx", "build", "coverage", "pytest", "pytest-cov", "twine"]
dev = ["build", "twine"]
doc = ["Sphinx"]
test = ["coverage", "pytest", "pytest-cov"]
[[package]]
name = "dnspython"
version = "2.3.0"
description = "DNS toolkit"
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "dnspython-2.3.0-py3-none-any.whl", hash = "sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46"},
{file = "dnspython-2.3.0.tar.gz", hash = "sha256:224e32b03eb46be70e12ef6d64e0be123a64e621ab4c0822ff6d450d52a540b9"},
]
[package.extras]
curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"]
dnssec = ["cryptography (>=2.6,<40.0)"]
doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.11.0)"]
doq = ["aioquic (>=0.9.20)"]
idna = ["idna (>=2.1,<4.0)"]
trio = ["trio (>=0.14,<0.23)"]
wmi = ["wmi (>=1.5.1,<2.0.0)"]
[[package]]
name = "future"
version = "0.18.3"
description = "Clean single-source support for Python 3 and 2"
category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"},
]
[[package]]
name = "lmfit"
version = "1.1.0"
description = "Least-Squares Minimization with Bounds and Constraints"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "lmfit-1.1.0-py3-none-any.whl", hash = "sha256:29f0540f94b3969a23db2b51abf309f327af8ea3667443ac4cd93d07fdfdb14f"},
{file = "lmfit-1.1.0.tar.gz", hash = "sha256:a2755b708ad7bad010178da28f082f55cbee7a084a625b452632e2d77b5391fb"},
]
[package.dependencies]
asteval = ">=0.9.28"
numpy = ">=1.19"
scipy = ">=1.6"
uncertainties = ">=3.1.4"
[package.extras]
all = ["Pillow", "Sphinx", "build", "cairosvg", "check-wheel-contents", "codecov", "corner", "coverage", "dill", "emcee (>=3.0.0)", "flaky", "jupyter-sphinx (>=0.2.4)", "matplotlib", "numdifftools", "pandas", "pre-commit", "pycairo", "pytest", "pytest-cov", "sphinx-gallery (>=0.10)", "sphinxcontrib-svg2pdfconverter", "sympy", "twine"]
dev = ["build", "check-wheel-contents", "pre-commit", "twine"]
doc = ["Pillow", "Sphinx", "cairosvg", "corner", "dill", "emcee (>=3.0.0)", "jupyter-sphinx (>=0.2.4)", "matplotlib", "numdifftools", "pandas", "pycairo", "sphinx-gallery (>=0.10)", "sphinxcontrib-svg2pdfconverter", "sympy"]
test = ["codecov", "coverage", "flaky", "pytest", "pytest-cov"]
[[package]]
name = "mongoengine"
version = "0.26.0"
description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "mongoengine-0.26.0-py3-none-any.whl", hash = "sha256:020a0779d1830affc649f2760d8c408e998981f18898e425eb041915181d3a53"},
{file = "mongoengine-0.26.0.tar.gz", hash = "sha256:3f284bdcbe8d1a3a9b8ab7d3c3ed672d10b8fd2e545447cd1d75e40d6e978332"},
]
[package.dependencies]
pymongo = ">=3.4,<5.0"
[[package]]
name = "numpy"
version = "1.24.2"
description = "Fundamental package for array computing in Python"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "numpy-1.24.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d"},
{file = "numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5"},
{file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253"},
{file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978"},
{file = "numpy-1.24.2-cp310-cp310-win32.whl", hash = "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9"},
{file = "numpy-1.24.2-cp310-cp310-win_amd64.whl", hash = "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0"},
{file = "numpy-1.24.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a"},
{file = "numpy-1.24.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0"},
{file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281"},
{file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910"},
{file = "numpy-1.24.2-cp311-cp311-win32.whl", hash = "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95"},
{file = "numpy-1.24.2-cp311-cp311-win_amd64.whl", hash = "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04"},
{file = "numpy-1.24.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2"},
{file = "numpy-1.24.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5"},
{file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a"},
{file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96"},
{file = "numpy-1.24.2-cp38-cp38-win32.whl", hash = "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d"},
{file = "numpy-1.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756"},
{file = "numpy-1.24.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a"},
{file = "numpy-1.24.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f"},
{file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb"},
{file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780"},
{file = "numpy-1.24.2-cp39-cp39-win32.whl", hash = "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468"},
{file = "numpy-1.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5"},
{file = "numpy-1.24.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d"},
{file = "numpy-1.24.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"},
{file = "numpy-1.24.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f"},
{file = "numpy-1.24.2.tar.gz", hash = "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22"},
]
[[package]]
name = "pandas"
version = "1.5.3"
description = "Powerful data structures for data analysis, time series, and statistics"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"},
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"},
{file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"},
{file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"},
{file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"},
{file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"},
{file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"},
{file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"},
{file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"},
{file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"},
{file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"},
{file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"},
{file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"},
]
[package.dependencies]
numpy = [
{version = ">=1.21.0", markers = "python_version >= \"3.10\""},
{version = ">=1.23.2", markers = "python_version >= \"3.11\""},
]
python-dateutil = ">=2.8.1"
pytz = ">=2020.1"
[package.extras]
test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"]
[[package]]
name = "pymongo"
version = "4.3.3"
description = "Python driver for MongoDB <http://www.mongodb.org>"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"},
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"},
{file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"},
{file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"},
{file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"},
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"},
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"},
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"},
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"},
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"},
{file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"},
{file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"},
{file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"},
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"},
{file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"},
{file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"},
{file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"},
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"},
{file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"},
{file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"},
{file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"},
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"},
{file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"},
{file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"},
{file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"},
]
[package.dependencies]
dnspython = ">=1.16.0,<3.0.0"
[package.extras]
aws = ["pymongo-auth-aws (<2.0.0)"]
encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"]
gssapi = ["pykerberos"]
ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
snappy = ["python-snappy"]
zstd = ["zstandard"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "pytz"
version = "2022.7.1"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
{file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
]
[[package]]
name = "scipy"
version = "1.10.1"
description = "Fundamental algorithms for scientific computing in Python"
category = "main"
optional = false
python-versions = "<3.12,>=3.8"
files = [
{file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"},
{file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"},
{file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"},
{file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"},
{file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"},
{file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"},
{file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"},
{file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"},
{file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"},
{file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"},
{file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"},
{file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"},
{file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"},
{file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"},
{file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"},
{file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"},
{file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"},
{file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"},
{file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"},
{file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"},
{file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"},
]
[package.dependencies]
numpy = ">=1.19.5,<1.27.0"
[package.extras]
dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"]
doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"]
test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "uncertainties"
version = "3.1.7"
description = "Transparent calculations with uncertainties on the quantities involved (aka error propagation); fast calculation of derivatives"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "uncertainties-3.1.7-py2.py3-none-any.whl", hash = "sha256:4040ec64d298215531922a68fa1506dc6b1cb86cd7cca8eca848fcfe0f987151"},
{file = "uncertainties-3.1.7.tar.gz", hash = "sha256:80111e0839f239c5b233cb4772017b483a0b7a1573a581b92ab7746a35e6faab"},
]
[package.dependencies]
future = "*"
[package.extras]
all = ["nose", "numpy", "sphinx"]
docs = ["sphinx"]
optional = ["numpy"]
tests = ["nose", "numpy"]
[metadata]
lock-version = "2.0"
python-versions = ">3.10,< 3.12"
content-hash = "aaad37b7d989f5285689b9e2192360da2b496be23cf41eb128e7e616e07a203e"

View File

@@ -1,33 +0,0 @@
[build-system]
requires = ["flit_core>=3.4"]
build-backend = "flit_core.buildapi"
[project]
name = "paveit"
version = "0.0.1"
authors = [
{ name="Example Author", email="author@example.com" },
]
description = "A small example package"
#readme = "README.md"
requires-python = ">=3.9"
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
]
#[project.urls]
#"Homepage" = "https://github.com/pypa/sampleproject"
#"Bug Tracker" = "https://github.com/pypa/sampleproject/issues"
######
#[tool.poetry.dependencies]
#python = ">3.10,< 3.12"
#lmfit = "~1.1.0"
#pandas = "~1.5.3"
#numpy = "~1.24.2"
#scipy = "~1.10.0"
#mongoengine = "~0.26.0"

6
pyproject.toml Normal file
View File

@@ -0,0 +1,6 @@
[build-system]
requires = [
"setuptools >= 40.8.0",
"wheel"
]
build-backend = "setuptools.build_meta"

3
setup.cfg Normal file → Executable file
View File

@@ -16,6 +16,9 @@ install_requires =
matplotlib matplotlib
seaborn seaborn
mongoengine mongoengine
statsmodels
toml
minio
[options.packages.find] [options.packages.find]
where=src where=src

View File

@@ -1,6 +0,0 @@
#!/usr/bin/env python
import setuptools
if __name__ == "__main__":
setuptools.setup()

1
src/paveit/__init__.py Normal file → Executable file
View File

@@ -1,4 +1,5 @@
# main __init__.py # main __init__.py
from .analysis import * from .analysis import *
from .functions import *
from .helper import * from .helper import *
from .labtest import * from .labtest import *

0
src/paveit/analysis/__init__.py Normal file → Executable file
View File

0
src/paveit/analysis/regression.py Normal file → Executable file
View File

View File

@@ -0,0 +1,19 @@
from .calibration import *
from .citt import *
from .components import *
from .data import *
from .enumeration import *
from .labworks import *
from .machines import *
from .material import *
from .material_properties import *
from .messages import *
from .metrics import *
from .norm_documents import *
from .norm_specification import *
from .project import *
from .regression import *
from .sheartest import *
from .taskmanager import *
from .usermanagement import *
from .workpackage import *

View File

@@ -0,0 +1,91 @@
import datetime
from mongoengine import *
from paveit.datamodels.components import Components
from paveit.helper import fetch_recursive, mongo_to_dict
from .usermanagement import Organisation
class Calibrarion(Document):
    """Calibration of testing equipment, carried out by an external service.

    NOTE(review): the class name looks like a typo for ``Calibration``; it is
    kept as-is because renaming would break persisted ``_cls`` values and
    existing imports.
    """

    # External calibration provider and where the device is located.
    company = StringField(required=True)
    name = StringField(required=True)
    department = StringField(required=False)
    room = StringField(required=False)
    serialnumber = StringField(required=False)
    extrainfo = StringField(required=False)
    # The equipment component that was calibrated.
    component = LazyReferenceField(Components, required=True)
    # Calibration timestamp; ``wtf_options`` configures the HTML form widget
    # (step "60" = minute resolution).
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    # Owning organisation; deleting the organisation cascades to its records.
    org_id = LazyReferenceField(Organisation,
                                required=True,
                                reverse_delete_rule=CASCADE)
    tags = ListField(StringField())

    def to_dict(self):
        """Return the document as a plain dict with lazy references resolved."""
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        return data

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'calibration',
        "db_alias": 'dblabtests',
        'indexes': [
            [("name", 1)],
        ]
    }
#####################
class Monitoring(Document):
    """In-house monitoring (Eigenüberwachung) of testing equipment.

    NOTE(review): this document is stored in the same 'calibration'
    collection as ``Calibrarion`` and declares an index on a "name" field
    that this schema does not define — both look like copy-paste leftovers;
    confirm before relying on them.
    """

    # Monitoring timestamp; ``wtf_options`` configures the HTML form widget
    # (step "60" = minute resolution).
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    # The equipment component being monitored.
    component = LazyReferenceField(Components, required=True)
    # Owning organisation; deleting the organisation cascades to its records.
    org_id = LazyReferenceField(Organisation,
                                required=True,
                                reverse_delete_rule=CASCADE)
    tags = ListField(StringField())

    def to_dict(self):
        """Return the document as a plain dict with lazy references resolved."""
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        return data

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'calibration',
        "db_alias": 'dblabtests',
        'indexes': [
            [("name", 1)],
        ]
    }

181
src/paveit/datamodels/citt.py Executable file
View File

@@ -0,0 +1,181 @@
import datetime
from mongoengine import *
from .taskmanager import TaskManagerBase
class CyclicIndirectTensileTest(Document):
    """Base document for a cyclic indirect tensile test (CIT-T) run.

    Default test standard is the German 'TP Asphalt Teil 24'. Concrete
    result documents inherit from this class (allow_inheritance is on).
    """

    # Test timestamp; ``wtf_options`` configures the HTML form widget.
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    standard = StringField(default='TP Asphalt Teil 24')
    # Organisation/user references are currently disabled:
    #org_id = LazyReferenceField(Organisation, required=True)
    #user_id = LazyReferenceField(User,
    #                             required=True,
    #                             reverse_delete_rule=DO_NOTHING)
    # Task this test run belongs to.
    task_id = LazyReferenceField(TaskManagerBase, required=True)
    tags = ListField(StringField())
    # Hash of the imported data file (deduplication / provenance — presumed
    # from the name, confirm against the importer).
    filehash = StringField(required=True)
    # NOTE(review): required=True together with default=None is contradictory;
    # saving without an explicit name will fail validation.
    speciment_name = StringField(required=True, default=None)

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'lab_citt',
        "db_alias": 'dblabtests',
    }
class CITTSiffnessResults(CyclicIndirectTensileTest):
    """Stiffness results of a cyclic indirect tensile test.

    NOTE(review): the class name carries the historical typo 'Siffness'
    (stiffness); renaming would break persisted ``_cls`` values.

    For each measured channel — force ``F``, the two horizontal displacement
    sensors ``s_hor_1`` / ``s_hor_2``, their sum ``s_hor_sum`` and the
    optional piston travel ``s_piston`` — the same group of fields is stored:
    fitted sine parameters (amp, freq, phase, offset, slope, r2) plus
    per-cycle min/max/mean/diff series and their aggregate statistics.
    Units are not visible in this file — confirm against the evaluation code.
    """

    # --- metadata of the test run ---------------------------------------
    # Set-point values (f: frequency, sigma: stress, T: temperature —
    # presumed from the names).
    f_set = FloatField()
    sigma_set = FloatField()
    T_set = FloatField()
    speciment_diameter = FloatField(required=True)
    speciment_height = FloatField(required=True)
    N_from = IntField()              # first evaluated load cycle
    N_to = IntField()                # last evaluated load cycle
    N_tot = IntField()               # total number of load cycles
    n_samples_per_cycle = IntField()

    # --- results ---------------------------------------------------------
    stiffness = FloatField()
    nu = FloatField()                # Poisson's ratio (presumed from name)
    phase = FloatField()
    el_strains = FloatField()

    # --- required parameters ---------------------------------------------
    ## F (force channel)
    F_amp = FloatField()
    F_freq = FloatField()
    F_phase = FloatField()
    F_offset = FloatField()
    F_slope = FloatField()
    F_r2 = FloatField()
    F_cycle_min = ListField(FloatField())
    F_min = FloatField()
    F_min_std = FloatField()
    F_min_diff_rel = FloatField()
    F_cycle_max = ListField(FloatField())
    F_max = FloatField()
    F_max_std = FloatField()
    F_max_diff_rel = FloatField()
    F_cycle_mean = ListField(FloatField())
    F_mean = FloatField()
    F_mean_std = FloatField()
    F_mean_diff_rel = FloatField()
    F_cycle_diff = ListField(FloatField())
    F_diff = FloatField()
    F_diff_std = FloatField()
    F_diff_diff_rel = FloatField()
    ## S1 (horizontal displacement sensor 1)
    s_hor_1_amp = FloatField()
    s_hor_1_freq = FloatField()
    s_hor_1_phase = FloatField()
    s_hor_1_offset = FloatField()
    s_hor_1_slope = FloatField()
    s_hor_1_r2 = FloatField()
    s_hor_1_cycle_min = ListField(FloatField())
    s_hor_1_min = FloatField()
    s_hor_1_min_std = FloatField()
    s_hor_1_min_diff_rel = FloatField()
    s_hor_1_cycle_max = ListField(FloatField())
    s_hor_1_max = FloatField()
    s_hor_1_max_std = FloatField()
    s_hor_1_max_diff_rel = FloatField()
    s_hor_1_cycle_mean = ListField(FloatField())
    s_hor_1_mean = FloatField()
    s_hor_1_mean_std = FloatField()
    s_hor_1_mean_diff_rel = FloatField()
    s_hor_1_cycle_diff = ListField(FloatField())
    s_hor_1_diff = FloatField()
    s_hor_1_diff_std = FloatField()
    s_hor_1_diff_diff_rel = FloatField()
    ## S2 (horizontal displacement sensor 2)
    s_hor_2_amp = FloatField()
    s_hor_2_freq = FloatField()
    s_hor_2_phase = FloatField()
    s_hor_2_offset = FloatField()
    s_hor_2_slope = FloatField()
    s_hor_2_r2 = FloatField()
    s_hor_2_cycle_min = ListField(FloatField())
    s_hor_2_min = FloatField()
    s_hor_2_min_std = FloatField()
    s_hor_2_min_diff_rel = FloatField()
    s_hor_2_cycle_max = ListField(FloatField())
    s_hor_2_max = FloatField()
    s_hor_2_max_std = FloatField()
    s_hor_2_max_diff_rel = FloatField()
    s_hor_2_cycle_mean = ListField(FloatField())
    s_hor_2_mean = FloatField()
    s_hor_2_mean_std = FloatField()
    s_hor_2_mean_diff_rel = FloatField()
    s_hor_2_cycle_diff = ListField(FloatField())
    s_hor_2_diff = FloatField()
    s_hor_2_diff_std = FloatField()
    s_hor_2_diff_diff_rel = FloatField()
    ## S-Sum (sum of both horizontal sensors)
    s_hor_sum_amp = FloatField()
    s_hor_sum_freq = FloatField()
    s_hor_sum_phase = FloatField()
    s_hor_sum_offset = FloatField()
    s_hor_sum_slope = FloatField()
    s_hor_sum_r2 = FloatField()
    s_hor_sum_cycle_min = ListField(FloatField())
    s_hor_sum_min = FloatField()
    s_hor_sum_min_std = FloatField()
    s_hor_sum_min_diff_rel = FloatField()
    s_hor_sum_cycle_max = ListField(FloatField())
    s_hor_sum_max = FloatField()
    s_hor_sum_max_std = FloatField()
    s_hor_sum_max_diff_rel = FloatField()
    s_hor_sum_cycle_mean = ListField(FloatField())
    s_hor_sum_mean = FloatField()
    s_hor_sum_mean_std = FloatField()
    s_hor_sum_mean_diff_rel = FloatField()
    s_hor_sum_cycle_diff = ListField(FloatField())
    s_hor_sum_diff = FloatField()
    s_hor_sum_diff_std = FloatField()
    s_hor_sum_diff_diff_rel = FloatField()

    # --- optional parameters ---------------------------------------------
    ## Piston (machine piston travel; not recorded by every machine)
    s_piston_amp = FloatField(required=False)
    s_piston_freq = FloatField(required=False)
    s_piston_phase = FloatField(required=False)
    s_piston_offset = FloatField(required=False)
    s_piston_slope = FloatField(required=False)
    s_piston_r2 = FloatField(required=False)
    s_piston_cycle_min = ListField(FloatField(), required=False)
    s_piston_min = FloatField(required=False)
    s_piston_min_std = FloatField(required=False)
    s_piston_min_diff_rel = FloatField(required=False)
    s_piston_cycle_max = ListField(FloatField(), required=False)
    s_piston_max = FloatField(required=False)
    s_piston_max_std = FloatField(required=False)
    s_piston_max_diff_rel = FloatField(required=False)
    s_piston_cycle_mean = ListField(FloatField(), required=False)
    s_piston_mean = FloatField(required=False)
    s_piston_mean_std = FloatField(required=False)
    s_piston_mean_diff_rel = FloatField(required=False)
    s_piston_cycle_diff = ListField(FloatField(), required=False)
    s_piston_diff = FloatField(required=False)
    s_piston_diff_std = FloatField(required=False)
    s_piston_diff_diff_rel = FloatField(required=False)

48
src/paveit/datamodels/client.py Executable file
View File

@@ -0,0 +1,48 @@
from mongoengine import *
import datetime
from .usermanagement import Organisation, User
class Client(Document):
    """A customer (client) of the lab, with address and billing data.

    Postal codes are limited to 5 characters, matching the German format
    implied by the 'Germany' defaults.
    """

    # Record timestamp; ``wtf_options`` configures the HTML form widget.
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    org_id = LazyReferenceField(Organisation, required=True)
    # Creating user; kept even if the user is deleted (DO_NOTHING).
    user_id = LazyReferenceField(User,
                                 required=True,
                                 reverse_delete_rule=DO_NOTHING)
    name = StringField(max_length=100)
    name_short = StringField(max_length=100)
    customer_id = StringField(max_length=100)
    # Physical address.
    address_country = StringField(max_length=100, default='Germany')
    address_road = StringField(max_length=100)
    address_plz = StringField(max_length=5)
    address_city = StringField(max_length=100)
    vat_id = StringField(max_length=100)  # VAT number (Umsatzsteuer-ID)
    # Billing address (may differ from the physical address).
    billing_country = StringField(max_length=100, default='Germany')
    billing_road = StringField(max_length=100)
    billing_plz = StringField(max_length=5)
    billing_city = StringField(max_length=100)
    billing_addition = StringField(max_length=100)

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'clients',
        "db_alias": 'dblabtests',
    }

View File

@@ -0,0 +1,67 @@
import datetime
from mongoengine import *
from paveit.helper import fetch_recursive, mongo_to_dict
from .usermanagement import Organisation
class Components(Document):
    """A single piece of lab equipment (sensor, load cell, controller, ...).

    Base document for all equipment components; subclasses only specialise
    the component type, the schema lives here (allow_inheritance is on).
    """

    company = StringField(required=True)           # manufacturer / vendor
    name = StringField(required=True)
    department = StringField(required=False)
    room = StringField(required=False)
    serialnumber = StringField(required=True)      # manufacturer serial number
    internalnumber = StringField(required=False)   # internal inventory label
    extrainfo = StringField(required=False)
    # FIX: mongoengine's IntField uses min_value/max_value. The previous
    # ``min=``/``max=`` kwargs were stored as plain field attributes and
    # never enforced, so the year range was not actually validated.
    year_manufacture = IntField(min_value=1900, max_value=2100, required=False)
    # Record timestamp; ``wtf_options`` configures the HTML form widget
    # (step "60" = minute resolution).
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    # Owning organisation; deleting the organisation cascades to its records.
    org_id = LazyReferenceField(Organisation,
                                required=True,
                                reverse_delete_rule=CASCADE)
    tags = ListField(StringField())

    def to_dict(self):
        """Return the document as a plain dict with lazy references resolved."""
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        return data

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'components',
        "db_alias": 'dblabtests',
        'indexes': [
            [("name", 1)],
        ]
    }
# Servo-hydraulic testing machine components
class ComponentsServoHydraulicMachineTemperatureControl(Components):
    """Temperature control units (original docstring said 'Kraftmessdosen' /
    load cells — a copy-paste error; this class is the temperature control)."""
    pass


class ComponentsServoHydraulicMachineKMD(Components):
    """Load cells (Kraftmessdosen)."""
    pass


class ComponentsServoHydraulicMachineLVDT(Components):
    """Displacement transducers (Wegaufnehmer / LVDT)."""
    pass

87
src/paveit/datamodels/data.py Executable file
View File

@@ -0,0 +1,87 @@
import datetime
from mongoengine import *
from .citt import CyclicIndirectTensileTest
from .sheartest import DynamicShearTest
class RawSinData(Document):
    """Raw sampled data of a sinusoidal (cyclic) test.

    Parallel lists per sample: ``time`` stamps, force ``F`` and the load
    cycle number ``N`` — channel meanings presumed from the names; confirm
    against the importer.
    """

    # Import timestamp; ``wtf_options`` configures the HTML form widget.
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    time = ListField(FloatField())
    F = ListField(FloatField())
    N = ListField(IntField())

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'data_rawsine',
        "db_alias": 'dblabtests',
    }
class RawData(Document):
    """Base document for single-value raw measurement data.

    Carries only the import timestamp; concrete measurement fields are
    added by subclasses (allow_inheritance is on).
    """

    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'data_raw',
        "db_alias": 'dblabtests',
    }
class DataSheartest(RawSinData):
    """Raw sine data of a dynamic shear test, linked to its result document."""

    # Result document this raw data belongs to; deleting the result deletes
    # the raw data as well (CASCADE).
    result = LazyReferenceField(DynamicShearTest,
                                required=True,
                                reverse_delete_rule=CASCADE)
    # Sampled displacement channels: two vertical sensors are mandatory,
    # piston travel and horizontal sensors are optional.
    s_vert_1 = ListField(FloatField())
    s_vert_2 = ListField(FloatField())
    s_piston = ListField(FloatField(), required=False)
    s_hor_1 = ListField(FloatField(), required=False)
    s_hor_2 = ListField(FloatField(), required=False)
class CITTSiffness(RawSinData):
    """Raw sine data of a CIT-T stiffness test, linked to its result document.

    NOTE(review): the class name carries the historical typo 'Siffness'
    (stiffness); renaming would break persisted ``_cls`` values.
    """

    # Result document this raw data belongs to; deleted together with it.
    result = LazyReferenceField(CyclicIndirectTensileTest,
                                required=True,
                                reverse_delete_rule=CASCADE)
    # Sampled displacement channels: both horizontal sensors, their sum,
    # and the optional piston travel.
    s_hor_1 = ListField(FloatField())
    s_hor_2 = ListField(FloatField())
    s_hor_sum = ListField(FloatField())
    s_piston = ListField(FloatField(), required=False)
# Single data points (one value per specimen, not time series)
class BitumenParameterStrassenbaubitumen(RawData):
    """Standard test parameters of a paving-grade bitumen (Straßenbaubitumen).

    Value ranges are plausibility bounds enforced by mongoengine validation;
    units are not visible in this file — confirm against the evaluation code.
    """

    penetration = FloatField(min_value=0, max_value=1000)
    softening_point = FloatField(min_value=0, max_value=500)
    flash_point = FloatField(min_value=0, max_value=500)
    # Solubility in percent; default 99.0.
    solubility = FloatField(default=99.0, min_value=0, max_value=100)
    fraass_breaking_point = FloatField(min_value=-100, max_value=100)
    # Resistance to hardening (retained penetration / softening-point
    # increase / mass change after ageing — presumed from the names).
    hardening_resistance_penetration = FloatField(min_value=0, max_value=100)
    hardening_resistance_softening_point = FloatField(min_value=0, max_value=100)
    hardening_resistance_masschange = FloatField(min_value=0, max_value=100)

View File

@@ -0,0 +1,113 @@
import datetime
from enum import Enum
from mongoengine import *
from paveit.helper import fetch_recursive, mongo_to_dict
class ModelSelection(Enum):
    """Which modules are available in the app (translated from German)."""

    BASE = 'base'
    ADVANCED = 'advanced'
class ProcessStatusEnum(Enum):
    """Status of a process such as a project or task.

    Intended lifecycle: initiated -> ongoing -> completed -> archive.

    NOTE(review): the original (German) docstring described ten states
    (ongoing, in progress, stalled, completed, pending, suspended,
    initiated, advanced, delayed, finalized), but only the four members
    below are implemented.
    """

    INITIATED = 'initiated'   # process has been started / set up
    ONGOING = 'ongoing'       # process is actively running
    COMPLETED = 'completed'   # process is finished
    ARCHIVE = 'archive'       # process is archived
class RelationalOperatorsEnum(Enum):
    """Relational operators used for limit specifications.

    NOTE(review): member names say lt/gt but the values say "or equal to",
    i.e. they actually denote <= and >= — confirm against the consumers.
    """

    between = 'between'
    lt = 'less than or equal to'
    gt = 'greater than or equal to'
class BitumenCategoryEnum(Enum):
    """Bitumen categories (values are the German display names)."""

    Strassenbau = "Straßenbaubitumen"          # paving-grade bitumen
    PmbA = "Elastomermodifizierte Bitumen"     # elastomer-modified bitumen
    PmbC = "Plastomermodifizierte Bitumen"     # plastomer-modified bitumen
class AsphaltCategoryEnum(Enum):
    """Asphalt mix categories (values are the German display names)."""

    ATS = "Asphalttragschichtmischgut"      # asphalt base course mix
    ABS = "Asphaltbindermischgut"           # asphalt binder course mix
    ADS = "Asphaltdeckschichtmischgut"      # asphalt surface course mix
    SMA = "Splittmastixasphalt"             # stone mastic asphalt
    MA = "Gussasphalt"                      # mastic asphalt
    PA = "Offenporiger Asphalt"             # porous asphalt
    ACTD = 'Asphalttragdeckschichtmischgut' # combined base/surface course mix
class TaskType(Enum):
    """Whether a lab test is a single task or part of a task flow."""

    SINGLE = "single"
    FLOW = "flow"
class LabtestsEnum(Enum):
    """Identifiers of the available lab tests."""

    # Performance tests
    CITTStiffness = 'CITTStiffness'
    SHEARStiffness = 'SheartestStiffness'
class Config(Document):
    """Base document for configuration/enumeration records.

    NOTE(review): indexes are declared on "material" and "name", which are
    not defined on this base class — presumably they exist on subclasses;
    verify.
    """

    # Record timestamp; ``wtf_options`` configures the HTML form widget.
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    tags = ListField(StringField())

    def to_dict(self):
        """Return the document as a plain dict with lazy references resolved."""
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        return data

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'enumeration',
        "db_alias": 'dblabtests',
        'indexes': [
            [("material", 1)],
            [("name", 1)],
        ]
    }
class Labtest(Config):
    """Assignment of a lab test to a company (translated from German).

    Tests defined here are the ones a customer may use: every individual
    test must first be created per customer, which also allows controlling
    which modules are available. The tests are grouped into
    classes/categories below to enable a separation into modules.
    """

    test = EnumField(LabtestsEnum, required=True)
    # NOTE(review): required/default are passed to the *inner* EnumField of
    # the list, not to the ListField itself — confirm this is intended.
    modul = ListField(EnumField(ModelSelection, required=True, default=ModelSelection.BASE))
    typ = EnumField(TaskType, required=True, default=TaskType.SINGLE)
class LabtestPerformAsphalt(Labtest):
    """Performance test for asphalt (Performanceprüfung Asphalt)."""
    pass
class LabtestPerformBitumen(Labtest):
    """Performance test for bitumen (Performanceprüfung Bitumen)."""
    pass

View File

@@ -0,0 +1,28 @@
from mongoengine import *
from .taskmanager import TaskManagerBase
# Preparation steps for performance investigations. Each class is a task
# type managed via TaskManagerBase; the schema lives in the base class.
class LabworksDrillRoad(TaskManagerBase):
    """Take drill cores from the road."""
    pass


class LabworksMakingAsphaltSlabs(TaskManagerBase):
    """Produce asphalt slabs."""
    pass


class LabworksDrillAsphaltSlabs(TaskManagerBase):
    """Drill cores out of asphalt slabs."""
    pass


class LabworksSawDrillCores(TaskManagerBase):
    """Saw drill cores to size."""
    pass


class LabworksGrindingAsphaltSamples(TaskManagerBase):
    """Grind asphalt samples plane-parallel."""
    pass


class LabworksDetermineDensity(TaskManagerBase):
    """Determine specimen density."""
    pass


class LabworksDetermineGeometry(TaskManagerBase):
    """Determine specimen geometry."""
    pass

View File

@@ -0,0 +1,93 @@
import datetime
from bson import ObjectId
from mongoengine import *
from paveit.helper import fetch_recursive, mongo_to_dict
from .components import (
ComponentsServoHydraulicMachineKMD,
ComponentsServoHydraulicMachineLVDT,
ComponentsServoHydraulicMachineTemperatureControl,
)
from .enumeration import Labtest, LabtestsEnum
from .usermanagement import Organisation
# ??? Labtest: Ist das richtig hier?
# ??? (original author note) Labtest: is this the right place for it?
class Experiment(EmbeddedDocument):
    """A lab test configured on a machine: the test plus its config strings."""

    test = LazyReferenceField(Labtest, required=True)
    config = ListField(StringField(), required=True)
class MachineBase(Document):
    """A testing machine with its default components and configured tests."""

    company = StringField(required=True)       # manufacturer / vendor
    name = StringField(required=True)
    department = StringField(required=False)
    room = StringField(required=False)
    serialnumber = StringField(required=True)
    extrainfo = StringField(required=False)
    # FIX: mongoengine's IntField uses min_value/max_value. The previous
    # ``min=``/``max=`` kwargs were stored as plain field attributes and
    # never enforced, so the year range was not actually validated.
    year_manufacture = IntField(min_value=1900, max_value=2100, required=False)
    # Lab tests this machine can perform, with per-test configuration.
    tests = ListField(EmbeddedDocumentField(Experiment), required=True)
    # Default components; leave empty if the component is movable between
    # machines (original note: "Standardkomponenten festlegen: wenn
    # ortsveränderlich, dann leer lassen").
    component_temperature = LazyReferenceField(ComponentsServoHydraulicMachineTemperatureControl, required=True)
    component_kmd = LazyReferenceField(ComponentsServoHydraulicMachineKMD, required=False)
    component_lvdt = ListField(LazyReferenceField(ComponentsServoHydraulicMachineLVDT), required=False)
    # Record timestamp; ``wtf_options`` configures the HTML form widget.
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    # Owning organisation; deleting the organisation cascades to its records.
    org_id = LazyReferenceField(Organisation,
                                required=True,
                                reverse_delete_rule=CASCADE)
    tags = ListField(StringField())

    def to_dict(self):
        """Return the document as a plain dict with lazy references resolved."""
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        return data

    def get_config(self, testname: LabtestsEnum):
        """Return the config strings registered on this machine for *testname*.

        Looks up the organisation's ``Labtest`` document for the given test
        and returns the matching experiment's config list, or ``[]`` when the
        test is unknown or not configured on this machine.

        FIX: the previous implementation kept scanning all entries after a
        match (last match won); we now return on the first match, which is
        equivalent when a test is registered at most once per machine.
        """
        labtest = Labtest.objects(org_id=self.org_id, test=testname).first()
        if not labtest:
            return []
        for experiment in self.tests:
            if experiment.test.id == labtest.id:
                return experiment.config
        return []

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'machines',
        "db_alias": 'dblabtests',
        'indexes': [
            [("name", 1)],
        ]
    }
class ServoHydraulicMachine(MachineBase):
    """Servo-hydraulic testing machine; inherits all fields from MachineBase."""
    pass

116
src/paveit/datamodels/material.py Executable file
View File

@@ -0,0 +1,116 @@
import datetime
from re import T
from bson.json_util import loads
from mongoengine import *
from paveit.helper import fetch_recursive, mongo_to_dict
from .enumeration import AsphaltCategoryEnum, BitumenCategoryEnum
from .norm_documents import (
NormDocumentAggregate,
NormDocumentAsphalt,
NormDocumentBitumen,
)
from .norm_specification import DeliveryGrain, EnumerateBase, AdditiveEnum
from .project import Project
from .usermanagement import Organisation, User
class Material(Document):
    """Base document for all materials (asphalt, bitumen, aggregates, ...).

    Concrete material types inherit from this class; all of them share the
    ``materials`` collection.
    """
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    org_id = LazyReferenceField(Organisation,
                                required=True,
                                reverse_delete_rule=CASCADE)
    user_id = LazyReferenceField(User,
                                 required=True,
                                 reverse_delete_rule=DO_NOTHING)
    # projects this material is used in (optional)
    project_ids = ListField(LazyReferenceField(Project,
                                               reverse_delete_rule=CASCADE),
                            required=False)
    # soft-delete flag: archived materials stay in the database
    archived = BooleanField(default=False)

    def to_dict(self):
        """Return the document (references resolved) as a plain dict."""
        # convert data to dict
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        return data

    # NOTE(review): the indexes reference ``material_id``/``name``, which
    # are not declared on this base class — confirm subclasses provide them.
    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'materials',
        "db_alias": 'dblabtests',
        'indexes': [
            [("material_id", 1)],
            [("name", 1)],
        ]
    }
class Asphalt(Material):
    """Asphalt mix material."""
    pass
    #limits = LazyReferenceField(EnumerateBase)


# Bitumen
class Bitumen(Material):
    """Bitumen (binder) material."""
    pass
    #norm = LazyReferenceField(NormDocumentBitumen, required=True)
    #limits = LazyReferenceField(EnumerateBase)
    #ce_marking = StringField(required=False)  # CE marking


class Bitumenemulsion(Material):
    """Bitumen emulsion material."""
    norm = StringField(required=False, default='TP Asphalt Teil 24')
    limits = LazyReferenceField(EnumerateBase)
    ce_marking = StringField(required=False)  # CE marking


class Epoxy(Material):
    """Epoxy material."""
    # NOTE(review): default norm looks copied from Bitumenemulsion — confirm.
    norm = StringField(required=False, default='TP Asphalt Teil 24')
    limits = LazyReferenceField(EnumerateBase)
    ce_marking = StringField(required=False)  # CE marking


class Kompaktasphalt(Material):
    """Compact asphalt (Kompaktasphalt) material."""
    norm = StringField(required=False, default='TP Asphalt Teil 24')
    name = StringField()
    ce_marking = StringField(required=False)  # CE marking


class Aggregate(Material):
    """Mineral aggregate material."""
    pass


class Additive(Material):
    """Additive material, classified by an AdditiveEnum category."""
    category = LazyReferenceField(AdditiveEnum, required=True)


class Dummy(Material):
    """Free-form placeholder material with generic stiffness data."""
    name = StringField()
    material = StringField()
    # presumably temperature/frequency-dependent stiffness values — TODO confirm
    young_modulus = DictField()

View File

@@ -0,0 +1,275 @@
import datetime
from mongoengine import *
from paveit.helper import fetch_recursive, mongo_to_dict
from enum import Enum
from .material import Material, Additive, Bitumen, Aggregate
from .usermanagement import Organisation, User
from .enumeration import AsphaltCategoryEnum, BitumenCategoryEnum
from .norm_documents import (
NormDocumentAggregate,
NormDocumentAsphalt,
NormDocumentBitumen,
)
from .norm_specification import (
AsphaltParameterLimitsBaseEnum,
BitumenParameterLimitsBaseEnum,
AdditiveEnum,
AdditiveSubEnum,
DeliveryGrain
)
class ProcessParameters(Enum):
    """Kind of a recorded process value.

    NOMINAL -- actual (measured) value ("Istwert")
    SET     -- target (set-point) value ("Sollwert")
    """

    NOMINAL = 'nominal'
    SET = 'set'
class Address(EmbeddedDocument):
    """Postal address of a company (plant, mine, supplier...)."""
    company = StringField()
    street = StringField()
    # NOTE(review): IntField cannot hold house numbers like "12a" — confirm.
    number = IntField()
    city = StringField()
    # NOTE(review): IntField drops leading zeros of postal codes — confirm.
    postal_code = IntField()
    country = StringField()
class Propertie(Document):
    """Base document for a property record attached to a Material.

    (Class name kept as-is for backwards compatibility; 'Property' is meant.)
    """
    material_id = LazyReferenceField(Material,
                                     required=True,
                                     reverse_delete_rule=CASCADE)
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})

    def to_dict(self):
        """Return the document (references resolved) as a plain dict."""
        # convert data to dict
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        return data

    # NOTE(review): index on "name" relies on subclasses declaring it.
    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'materialproperties',
        "db_alias": 'dblabtests',
        'indexes': [
            [("material_id", 1)],
            [("name", 1)],
        ]
    }
# Additives
class PropertieAdditive(Propertie):
    """Base class for properties of an Additive material."""
    pass


class PropertieAdditiveMeta(PropertieAdditive):
    """Master data (identification, certificate, supplier) of an additive."""
    name = StringField()
    productnumber = StringField()
    certificate_number = StringField()
    # NOTE(review): IntField — presumably a year; confirm intended format.
    certificate_date = IntField()
    address = EmbeddedDocumentField(Address)
    subcategory = LazyReferenceField(AdditiveSubEnum, required=True)
# Aggregates
class PropertieAggregate(Propertie):
    """Base class for properties of an Aggregate material."""
    pass


class PropertieAggregateMeta(PropertieAggregate):
    """Master data (identification, certificate, origin) of an aggregate."""
    name = StringField()
    productnumber = StringField()
    certificate_number = StringField()
    # NOTE(review): IntField — presumably a year; confirm intended format.
    certificate_date = IntField()
    mine = StringField()
    address = EmbeddedDocumentField(Address)
    category = LazyReferenceField(DeliveryGrain, required=False)
# Bitumen
class PropertieBitumen(Propertie):
    """Base class for properties of a Bitumen material."""
    pass


class PropertieBitumenMeta(PropertieBitumen):
    """Master data (identification, certificate, supplier) of a bitumen."""
    name = StringField()
    productnumber = StringField()
    certificate_number = StringField()
    # NOTE(review): IntField — presumably a year; confirm intended format.
    certificate_date = IntField()
    address = EmbeddedDocumentField(Address)
    category = LazyReferenceField(BitumenParameterLimitsBaseEnum, required=True)
# Asphalt
class PropertieAsphalt(Propertie):
    """Base class for properties of an Asphalt material."""
    pass


class PropertieAsphaltMeta(PropertieAsphalt):
    """Master data (identification, recipe, certificate, plant) of an asphalt."""
    name = StringField()
    productnumber = StringField()
    recipenumber = StringField()
    certificate_number = StringField()
    # NOTE(review): IntField — presumably a year; confirm intended format.
    certificate_date = IntField()
    mixingplant = StringField()
    address = EmbeddedDocumentField(Address)
    category = LazyReferenceField(AsphaltParameterLimitsBaseEnum, required=True)
class Additive(EmbeddedDocument):
    """Dosage of one additive within an asphalt recipe.

    WARNING(review): this class shadows the ``Additive`` Material class
    imported from ``.material``. Inside this class body ``Additive`` still
    refers to the imported Material class (the reference target of
    ``additive_id``); every use of the name ``Additive`` *after* this point
    in the module resolves to this EmbeddedDocument. Consider renaming.
    """
    additive_id = LazyReferenceField(Additive, required=True)
    # presumably dosage per recipe variant A/B/C plus tolerance band — TODO confirm
    A = FloatField()
    B = FloatField()
    C = FloatField()
    min = FloatField()
    max = FloatField()
class BitumenInfo(EmbeddedDocument):
    """Binder quantities and binder test results of an asphalt recipe.

    NOTE(review): the ``_A``/``_B``/``_C`` suffixes presumably denote recipe
    variants, ``_min``/``_max`` the tolerance band — confirm with the authors.
    """
    # binder added directly
    added_binder_A = FloatField(required=False)
    added_binder_B = FloatField(required=False)
    added_binder_C = FloatField(required=False)
    added_binder_min = FloatField(required=False)
    added_binder_max = FloatField(required=False)
    # binder contributed by additives
    binder_from_additives_A = FloatField(required=False)
    binder_from_additives_B = FloatField(required=False)
    binder_from_additives_C = FloatField(required=False)
    binder_from_additives_min = FloatField(required=False)
    binder_from_additives_max = FloatField(required=False)
    # total binder (by mass)
    total_binder_A = FloatField(required=False)
    total_binder_B = FloatField(required=False)
    total_binder_C = FloatField(required=False)
    total_binder_min = FloatField(required=False)
    total_binder_max = FloatField(required=False)
    # total binder (by volume)
    total_binder_vol_A = FloatField(required=False)
    total_binder_vol_B = FloatField(required=False)
    total_binder_vol_C = FloatField(required=False)
    total_binder_vol_min = FloatField(required=False)
    total_binder_vol_max = FloatField(required=False)
    # elastic recovery
    elastic_recovery_A = FloatField(required=False)
    elastic_recovery_B = FloatField(required=False)
    elastic_recovery_C = FloatField(required=False)
    elastic_recovery_min = FloatField(required=False)
    elastic_recovery_max = FloatField(required=False)
    # equivalent stiffness
    equi_stiffness_A = FloatField(required=False)
    equi_stiffness_B = FloatField(required=False)
    equi_stiffness_C = FloatField(required=False)
    equi_stiffness_min = FloatField(required=False)
    equi_stiffness_max = FloatField(required=False)
    # phase angle
    phase_angle_A = FloatField(required=False)
    phase_angle_B = FloatField(required=False)
    phase_angle_C = FloatField(required=False)
    phase_angle_min = FloatField(required=False)
    phase_angle_max = FloatField(required=False)
class PropertieAsphaltBitumenParameters(PropertieAsphalt):
    """Binder composition of an asphalt: base bitumen, binder data, additives."""
    bitumen_id = LazyReferenceField(Bitumen, required=True)
    bitumen = EmbeddedDocumentField(BitumenInfo)
    # uses the Additive EmbeddedDocument defined above in this module
    additives = ListField(EmbeddedDocumentField(Additive))
class AggregateInfo(EmbeddedDocument):
    """Grading curve and bulk properties of one aggregate in a recipe.

    Field names encode sieve sizes with ``x`` as the decimal point
    (e.g. ``size_31x5`` -> 31.5 mm, ``size_0x063`` -> 0.063 mm) —
    presumably percentage passing/retained per sieve; TODO confirm.
    """
    # NOTE(review): field named ``id`` — easily confused with a document pk.
    id = LazyReferenceField(Aggregate, required=False)
    size_45x0 = FloatField(required=False)
    size_31x5 = FloatField(required=False)
    size_22x4 = FloatField(required=False)
    size_16x0 = FloatField(required=False)
    size_11x2 = FloatField(required=False)
    size_8x0 = FloatField(required=False)
    size_5x6 = FloatField(required=False)
    size_2x0 = FloatField(required=False)
    size_1x0 = FloatField(required=False)
    size_0x25 = FloatField(required=False)
    size_0x125 = FloatField(required=False)
    size_0x063 = FloatField(required=False)
    size_less_0x063 = FloatField(required=False)
    oversize = FloatField(required=False)
    normal_grain = FloatField(required=False)
    bulk_density = FloatField(required=False)
    flow_coefficient = FloatField(required=False)
    aggregates_bulk_density = FloatField(required=False)
class PropertieAsphaltAggregates(PropertieAsphalt):
    """Aggregate composition of an asphalt recipe."""
    aggregates = ListField(EmbeddedDocumentField(AggregateInfo), required=True)
class MineralMixtureInfo(EmbeddedDocument):
    """Grading curve of the combined mineral mixture.

    Field names encode sieve sizes with ``x`` as the decimal point
    (e.g. ``size_22x4`` -> 22.4 mm).
    """
    size_45x0 = FloatField(required=False)
    size_31x5 = FloatField(required=False)
    size_22x4 = FloatField(required=False)
    size_16x0 = FloatField(required=False)
    size_11x2 = FloatField(required=False)
    size_8x0 = FloatField(required=False)
    size_5x6 = FloatField(required=False)
    size_2x0 = FloatField(required=False)
    size_1x0 = FloatField(required=False)
    size_0x25 = FloatField(required=False)
    size_0x125 = FloatField(required=False)
    size_0x063 = FloatField(required=False)
    size_less_0x063 = FloatField(required=False)
class PropertieAsphaltMineralMixture(PropertieAsphalt):
    """Mineral mixture grading of an asphalt.

    NOTE(review): ``trailing`` presumably means material retained on the
    sieve, ``passage`` the material passing — confirm terminology.
    """
    trailing = EmbeddedDocumentField(MineralMixtureInfo, required=False)
    passage = EmbeddedDocumentField(MineralMixtureInfo, required=False)
class PropertyMetrics(EmbeddedDocument):
    """One measured quantity: values A/B/C plus a min/max tolerance band."""
    A = FloatField(required=False)
    B = FloatField(required=False)
    C = FloatField(required=False)
    min = FloatField(required=False)
    max = FloatField(required=False)
class PropertiesAsphaltMixingParameters(PropertieAsphalt):
    """Volumetric and Marshall parameters of an asphalt mix."""
    bulk_density = EmbeddedDocumentField(PropertyMetrics)
    asphalt_density = EmbeddedDocumentField(PropertyMetrics)
    marshall_density = EmbeddedDocumentField(PropertyMetrics)
    cavity_content = EmbeddedDocumentField(PropertyMetrics)
    cavity_filling_level = EmbeddedDocumentField(PropertyMetrics)
    marshall_compression_temperature = EmbeddedDocumentField(PropertyMetrics)
    expired_binder_quantity = EmbeddedDocumentField(PropertyMetrics)
    water_sensitivity_ITSR = EmbeddedDocumentField(PropertyMetrics)
    swelling_DIN1996 = EmbeddedDocumentField(PropertyMetrics)

View File

@@ -0,0 +1,32 @@
import datetime
from mongoengine import *
from .usermanagement import Organisation, User
class MessageBase(Document):
    """Base document for user-submitted messages (bug reports etc.)."""
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    org_id = LazyReferenceField(Organisation, required=True)
    user_id = LazyReferenceField(User, required=True)

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'messages',
        "db_alias": 'dblabtests',
    }
class Bugreport(MessageBase):
    """A bug report submitted from the frontend."""
    message = StringField(max_length=300, required=True)
    # camelCase kept for compatibility with the frontend payload
    currentPage = StringField(required=True)

View File

@@ -0,0 +1,50 @@
import datetime
from mongoengine import *
from .project import Project
from .usermanagement import User, Organisation
from .workpackage import Workpackage
class MetricsBase(Document):
    """Base document for request/usage metrics."""
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    # which application produced the metric
    app = StringField(required=True, default='backend')
    # HTTP method of the recorded request
    method = StringField(required=True)
    url = StringField(required=False)
    client = StringField(required=False)
    status_code = IntField(required=False)

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'metrics',
        "db_alias": 'dblabtests',
    }
class MetricsBackend(MetricsBase):
    """Backend request metric enriched with optional context references."""
    project_id = LazyReferenceField(Project, required=False)
    user_id = LazyReferenceField(User, required=False)
    workpackage_id = LazyReferenceField(Workpackage, required=False)
    org_id = LazyReferenceField(Organisation, required=False)
    runtime = FloatField()  # in s
    task = StringField(max_length=30)

View File

@@ -0,0 +1,36 @@
from mongoengine import *
from enum import Enum
class NormPublisherEnum(Enum):
    """Publisher of a norm/standard document."""
    FGSV = 'FGSV'
class NormDocument(Document):
    """A norm/standard document (e.g. an FGSV publication)."""
    name = StringField(required=True)
    name_short = StringField(required=True)
    publisher = EnumField(NormPublisherEnum, required=True, default=NormPublisherEnum.FGSV)
    number = StringField(required=True)
    # year/month of publication
    year =IntField(min_value=1900, max_value=2099)
    month = IntField(min_value=1, max_value=12, required=False)

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'norm_documents',
        "db_alias": 'dblabtests',
    }
class NormDocumentAggregate(NormDocument):
    """Norm document applying to aggregates."""
    pass


class NormDocumentBitumen(NormDocument):
    """Norm document applying to bitumen."""
    pass


class NormDocumentAsphalt(NormDocument):
    """Norm document applying to asphalt."""
    pass

View File

@@ -0,0 +1,225 @@
from mongoengine import *
from .enumeration import RelationalOperatorsEnum, BitumenCategoryEnum, AsphaltCategoryEnum
from .norm_documents import NormDocumentAggregate, NormDocumentBitumen, NormDocumentAsphalt
from paveit.helper import fetch_recursive, mongo_to_dict
class EnumerateBase(Document):
    """Base document for norm-specification enumerations and limit sets."""

    def to_dict(self):
        """Return the document (references resolved) as a plain dict."""
        data = fetch_recursive(self)
        data = mongo_to_dict(data)
        # BUGFIX: removed stray debug ``print(data)`` left over from development.
        return data

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'norm_specification',
        "db_alias": 'dblabtests',
    }
# Additives
class AdditiveEnum(EnumerateBase):
    """Top-level additive category."""
    name = StringField()


class AdditiveSubEnum(EnumerateBase):
    """Additive subcategory, linked to its parent AdditiveEnum."""
    categoryId = LazyReferenceField(AdditiveEnum, required=True)
    name = StringField()
# Aggregates
class DeliveryGrain(EnumerateBase):
    """Delivery grain class of an aggregate, referencing its norm."""
    name = StringField()
    category = StringField()
    norm = LazyReferenceField(NormDocumentAggregate, required=True)
# Bitumen
class BitumenParameterLimitsBaseEnum(EnumerateBase):
    """Base class for bitumen parameter limit sets."""
    pass
class BitumenParameterLimitsStrassenbaubitumen(BitumenParameterLimitsBaseEnum):
    """Limit values for paving-grade bitumen (Strassenbaubitumen).

    BUGFIX: the ``*_unit`` fields previously passed the unit string as the
    first positional argument of ``StringField``, which is the ``regex``
    parameter in mongoengine; the intent (cf. sibling fields declared with
    ``default='%'``) is a default value.
    """
    name = StringField()
    category = EnumField(BitumenCategoryEnum, required=True)
    norm = LazyReferenceField(NormDocumentBitumen, required=True)
    # Needle penetration
    penetration_unit = StringField(default='0.1 mm')
    penetration_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.between)
    penetration_max = FloatField(min_value=0, max_value=1000)
    penetration_min = FloatField(min_value=0, max_value=1000)
    # Softening point (ring and ball)
    softening_point_unit = StringField(default='°C')
    softening_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.between)
    softening_point_min = FloatField(min_value=0, max_value=500)
    softening_point_max = FloatField(min_value=0, max_value=500)
    # Flash point
    flash_point_unit = StringField(default='°C')
    flash_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    flash_point_min = FloatField(min_value=0, max_value=500)
    # Solubility
    solubility_unit = StringField(default='%')
    solubility_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    solubility_min = FloatField(default=99.0, min_value=0, max_value=100)
    # Fraass breaking point
    fraass_breaking_point_unit = StringField(default='°C')
    fraass_breaking_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    fraass_breaking_point_max = FloatField(min_value=-100, max_value=100)
    # Resistance to hardening under the influence of heat and air
    ## retained penetration
    hardening_resistance_penetration_unit = StringField(default='%')
    hardening_resistance_penetration_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    hardening_resistance_penetration_min = FloatField(min_value=0, max_value=100)
    ## increase of the ring-and-ball softening point
    hardening_resistance_softening_point_unit = StringField(default='°C')
    hardening_resistance_softening_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    hardening_resistance_softening_point_max = FloatField(min_value=0, max_value=100)
    ## change of mass
    hardening_resistance_masschange_unit = StringField(default='%')
    hardening_resistance_masschange_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    hardening_resistance_masschange_max = FloatField(min_value=0, max_value=100)
class BitumenParameterLimitsPmB(BitumenParameterLimitsBaseEnum):
    """Limit values for polymer-modified bitumen (PmB).

    BUGFIX: the ``*_unit`` fields previously passed the unit string as the
    first positional argument of ``StringField``, which is the ``regex``
    parameter in mongoengine; the intent (cf. the fields already declared
    with ``default='%'``) is a default value. Redundant
    ``StringField('%', default='%')`` forms are unified to ``default=`` only.
    """
    name = StringField()
    category = EnumField(BitumenCategoryEnum, required=True)
    norm = LazyReferenceField(NormDocumentBitumen, required=True)
    # Needle penetration
    penetration_unit = StringField(default='0.1 mm')
    penetration_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.between)
    penetration_max = FloatField(min_value=0, max_value=1000)
    penetration_min = FloatField(min_value=0, max_value=1000)
    # Softening point (ring and ball)
    softening_point_unit = StringField(default='°C')
    softening_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    softening_point_min = FloatField(min_value=0, max_value=500)
    # Force ductility
    force_ductility_unit = StringField(default='J/cm²')
    force_ductility_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    force_ductility_min = FloatField(min_value=0, max_value=10)
    # Flash point
    flash_point_unit = StringField(default='°C')
    flash_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    flash_point_min = FloatField(min_value=0, max_value=500)
    # Fraass breaking point
    fraass_breaking_point_unit = StringField(default='°C')
    fraass_breaking_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    fraass_breaking_point_max = FloatField(min_value=-100, max_value=100)
    # Elastic recovery at 25 °C
    elastic_recovery_25deg_unit = StringField(default='%')
    elastic_recovery_25deg_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    elastic_recovery_25deg_min = FloatField(min_value=0, max_value=100, default=None)
    # Elastic recovery at 10 °C
    elastic_recovery_10deg_unit = StringField(default='%')
    elastic_recovery_10deg_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    elastic_recovery_10deg_min = FloatField(min_value=0, max_value=100, default=None)
    # Plasticity range
    plasticity_range_unit = StringField(default='°C')
    plasticity_range_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    plasticity_range_min = FloatField(min_value=0, max_value=100, required=False, default=None)
    # Storage stability: difference of the softening point
    storage_stability_softening_point_unit = StringField(default='°C')
    storage_stability_softening_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    storage_stability_softening_point_max = FloatField(min_value=-100, max_value=100)
    # Storage stability: penetration
    storage_stability_penetration_unit = StringField(default='mm')
    storage_stability_penetration_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    storage_stability_penetration_max = FloatField(min_value=-100, max_value=100)
    # Resistance to hardening under the influence of heat and air
    ## change of mass
    hardening_resistance_masschange_unit = StringField(default='%')
    hardening_resistance_masschange_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    hardening_resistance_masschange_max = FloatField(min_value=0, max_value=100)
    ## retained penetration
    hardening_resistance_penetration_unit = StringField(default='%')
    hardening_resistance_penetration_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    hardening_resistance_penetration_min = FloatField(min_value=0, max_value=100)
    ## increase of the ring-and-ball softening point
    hardening_resistance_softening_point_unit = StringField(default='°C')
    hardening_resistance_softening_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    hardening_resistance_softening_point_max = FloatField(min_value=0, max_value=100)
    ## decrease of the ring-and-ball softening point
    hardening_resistance_decrease_softening_point_unit = StringField(default='°C')
    hardening_resistance_decrease_softening_point_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.lt)
    # NOTE(review): garbled name (double underscore, missing '_') kept for
    # DB/caller compatibility — should read
    # ``hardening_resistance_decrease_softening_point_max``.
    hardening_resistance__decreasesoftening_point_max = FloatField(min_value=0, max_value=100)
    # Elastic recovery at 25 °C (after hardening)
    hardening_resistance_elastic_recovery_25deg_unit = StringField(default='%')
    hardening_resistance_elastic_recovery_25deg_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    hardening_resistance_elastic_recovery_25deg_min = FloatField(min_value=0, max_value=100, default=None)
    # Elastic recovery at 10 °C (after hardening)
    hardening_resistance_elastic_recovery_10deg_unit = StringField(default='%')
    hardening_resistance_elastic_recovery_10deg_operator = EnumField(RelationalOperatorsEnum, default=RelationalOperatorsEnum.gt)
    hardening_resistance_elastic_recovery_10deg_min = FloatField(min_value=0, max_value=100, required=False, default=None)
# Asphalt
class SievePassage(EmbeddedDocument):
    """Allowed passing range for one sieve width."""
    # sieve width — presumably in mm; TODO confirm unit
    wide = FloatField(required=True)
    passage_min = FloatField(required=True)
    passage_max = FloatField(required=True)
class AsphaltParameterLimitsBaseEnum(EnumerateBase):
    """Base class for asphalt parameter limit sets."""
    pass
class AsphaltParameterLimitsAsphalttragschicht(AsphaltParameterLimitsBaseEnum):
    """Limit values for asphalt base course (Asphalttragschicht) mixes."""
    name = StringField()
    # NOTE(review): references the *bitumen* norm document type — confirm
    # this should not be NormDocumentAsphalt.
    norm = LazyReferenceField(NormDocumentBitumen, required=True)
    # Aggregates: sieve-passage envelope
    sieve_passage = ListField(EmbeddedDocumentField(SievePassage))
    # Bitumen
    bitumen_type = ListField(LazyReferenceField(BitumenParameterLimitsBaseEnum), required=True)
    # BUGFIX: mongoengine's FloatField takes ``min_value``/``max_value``,
    # not ``min``/``max`` (cf. the FloatFields elsewhere in this module).
    min_bitumen_content = FloatField(min_value=0, max_value=100)
    min_voids_content = FloatField(min_value=0, max_value=100)
    max_voids_content = FloatField(min_value=0, max_value=100)
class AsphaltParameterLimitsAsphaltbinderschicht(AsphaltParameterLimitsAsphalttragschicht):
    """Limits for asphalt binder course mixes (same fields as base course)."""
    pass


class AsphaltParameterLimitsAsphaltdeckschicht(AsphaltParameterLimitsAsphalttragschicht):
    """Limits for asphalt surface course mixes."""
    pass


class AsphaltParameterLimitsGussasphalt(AsphaltParameterLimitsAsphalttragschicht):
    """Limits for mastic asphalt (Gussasphalt) mixes."""
    pass


class AsphaltParameterLimitsSMA(AsphaltParameterLimitsAsphalttragschicht):
    """Limits for stone mastic asphalt (SMA) mixes."""
    pass


class AsphaltParameterLimitsPA(AsphaltParameterLimitsAsphalttragschicht):
    """Limits for porous asphalt (PA) mixes."""
    pass


class AsphaltParameterLimitsACTD(AsphaltParameterLimitsAsphalttragschicht):
    """Limits for AC TD mixes."""
    pass

View File

@@ -0,0 +1,53 @@
import datetime
from mongoengine import *
from .client import Client
from .usermanagement import Organisation, User
from .enumeration import ProcessStatusEnum
class Project(Document):
    """A customer project that lab work and tasks are booked against."""
    project_number = StringField(required=False)
    client_id = LazyReferenceField(Client,
                                   required=True,
                                   reverse_delete_rule=CASCADE)
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    status = EnumField(ProcessStatusEnum, default=ProcessStatusEnum.ONGOING)
    org_id = LazyReferenceField(Organisation,
                                required=True,
                                reverse_delete_rule=CASCADE)
    user_id = LazyReferenceField(User,
                                 required=True,
                                 reverse_delete_rule=DO_NOTHING)
    name = StringField(required=True)
    name_short = StringField(required=False)
    tags = ListField(StringField())

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'projects',
        "db_alias": 'dblabtests',
        'indexes': [
            [("name_short", 1)],
            [("client_id", 1)],
            [("name", 1)],
            [("project_number", 1)],
        ]
    }

View File

@@ -0,0 +1,44 @@
import datetime
from mongoengine import *
from .taskmanager import TaskManagerBase
class RegressionBase(Document):
    """Base document for regression/fit results belonging to a lab task."""
    # BUGFIX: pass the callable, not its result — ``datetime.datetime.now()``
    # was evaluated once at import time, freezing the default timestamp for
    # every document created afterwards (all sibling models pass the callable).
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    task_id = LazyReferenceField(TaskManagerBase, required=True)
    # statistical quality of the fit
    stat_r2 = FloatField(required=False)

    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'regression',
        "db_alias": 'dblabtests',
    }
class RegCITT(RegressionBase):
    """Regression result of a CITT stiffness fit.

    NOTE(review): Emax/Emin/T0/phi/z0/z1 are fit parameters of the CITT
    master-curve model — see the CITT evaluation code for their meaning.
    """
    # number of samples used in the fit
    nsamples = IntField()
    Emax = FloatField(min_value=0, max_value=150000)
    Emin = FloatField()
    T0 = FloatField(min_value=-100, max_value=100)
    phi = FloatField()
    z0 = FloatField()
    z1 = FloatField()

View File

@@ -0,0 +1,237 @@
import datetime
from mongoengine import *
from .taskmanager import TaskManagerBase
from .usermanagement import User
class DynamicShearTest(Document):
    """Base document for a dynamic shear test (TP Asphalt part 48 C)."""
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    task_id = LazyReferenceField(TaskManagerBase, required=True)
    user_id = LazyReferenceField(User, required=True)
    # gap width between the shear plates — presumably mm; TODO confirm unit
    gap_width = FloatField(default=1.0)
    tags = ListField(StringField())
    standard = StringField(default='TP Asphalt Teil 48 C')
    # hash of the imported raw-data file (duplicate detection)
    filehash = StringField(required=True)
    # sic: 'specimen' — name kept for DB compatibility
    speciment_name = StringField()
    # NOTE(review): several indexed fields (lab, project, bruch, bounding,
    # workpackage) are not declared on this class — confirm subclasses or
    # dynamic fields provide them.
    meta = {
        'allow_inheritance':
        True,
        'index_opts': {},
        'index_background':
        True,
        'index_cls':
        False,
        'auto_create_index':
        True,
        "db_alias":
        'dblabtests',
        'collection':
        'lab_sheartest',
        'indexes': [
            [("lab", 1)],
            [("speciment_name", 1)],
            [("project", 1)],
            [("bruch", 1)],
            [("lab", 1), ("project", 1)],
            [("lab", 1), ("project", 1), ("workpackage", 1)],
            [("lab", 1), ("project", 1), ("bounding", 1)],
        ]
    }
class DynamicShearTestExtension(DynamicShearTest):
    """One evaluated load-cycle window of a dynamic shear test.

    Per measured channel (force F, vertical LVDTs 1/2 and their mean,
    optional horizontal LVDTs 1/2 and piston) the sine-fit parameters
    (amp/freq/phase/offset/slope/r2) and per-cycle min/max/mean/diff
    statistics are stored.
    """
    # metadata of the evaluated window
    f_set = FloatField(required=True)
    sigma_normal = FloatField(required=True)
    T_set = FloatField(required=True)
    extension = FloatField(required=True)
    N_from = IntField()
    N_to = IntField()
    N_tot = IntField()
    n_samples_per_cycle = IntField()
    # shear modulus of this window
    G = FloatField(required=True)
    broken = BooleanField(required=True)
    phase = FloatField()
    # fit parameters
    ## required parameters
    ## F: force channel
    F_amp = FloatField(required=True)
    F_freq = FloatField(required=True)
    F_phase = FloatField(required=True)
    F_offset = FloatField(required=True)
    F_slope = FloatField(required=True)
    F_r2 = FloatField(required=True)
    F_cycle_min = ListField(FloatField())
    F_min = FloatField()
    F_min_std = FloatField()
    F_min_diff_rel = FloatField()
    F_cycle_max = ListField(FloatField())
    F_max = FloatField()
    F_max_std = FloatField()
    F_max_diff_rel = FloatField()
    F_cycle_mean = ListField(FloatField())
    F_mean = FloatField()
    F_mean_std = FloatField()
    F_mean_diff_rel = FloatField()
    F_cycle_diff = ListField(FloatField())
    F_diff = FloatField()
    F_diff_std = FloatField()
    F_diff_diff_rel = FloatField()
    ## S1: vertical displacement sensor 1
    s_vert_1_amp = FloatField()
    s_vert_1_freq = FloatField()
    s_vert_1_phase = FloatField()
    s_vert_1_offset = FloatField()
    s_vert_1_slope = FloatField()
    s_vert_1_r2 = FloatField()
    s_vert_1_cycle_min = ListField(FloatField())
    s_vert_1_min = FloatField()
    s_vert_1_min_std = FloatField()
    s_vert_1_min_diff_rel = FloatField()
    s_vert_1_cycle_max = ListField(FloatField())
    s_vert_1_max = FloatField()
    s_vert_1_max_std = FloatField()
    s_vert_1_max_diff_rel = FloatField()
    s_vert_1_cycle_mean = ListField(FloatField())
    s_vert_1_mean = FloatField()
    s_vert_1_mean_std = FloatField()
    s_vert_1_mean_diff_rel = FloatField()
    s_vert_1_cycle_diff = ListField(FloatField())
    s_vert_1_diff = FloatField()
    s_vert_1_diff_std = FloatField()
    s_vert_1_diff_diff_rel = FloatField()
    ## S2: vertical displacement sensor 2
    s_vert_2_amp = FloatField()
    s_vert_2_freq = FloatField()
    s_vert_2_phase = FloatField()
    s_vert_2_offset = FloatField()
    s_vert_2_slope = FloatField()
    s_vert_2_r2 = FloatField()
    s_vert_2_cycle_min = ListField(FloatField())
    s_vert_2_min = FloatField()
    s_vert_2_min_std = FloatField()
    s_vert_2_min_diff_rel = FloatField()
    s_vert_2_cycle_max = ListField(FloatField())
    s_vert_2_max = FloatField()
    s_vert_2_max_std = FloatField()
    s_vert_2_max_diff_rel = FloatField()
    s_vert_2_cycle_mean = ListField(FloatField())
    s_vert_2_mean = FloatField()
    s_vert_2_mean_std = FloatField()
    s_vert_2_mean_diff_rel = FloatField()
    s_vert_2_cycle_diff = ListField(FloatField())
    s_vert_2_diff = FloatField()
    s_vert_2_diff_std = FloatField()
    s_vert_2_diff_diff_rel = FloatField()
    ## S-Sum: mean of the vertical sensors
    s_vert_mean_amp = FloatField()
    s_vert_mean_freq = FloatField()
    s_vert_mean_phase = FloatField()
    s_vert_mean_offset = FloatField()
    s_vert_mean_slope = FloatField()
    s_vert_mean_r2 = FloatField()
    s_vert_mean_cycle_min = ListField(FloatField())
    s_vert_mean_min = FloatField()
    s_vert_mean_min_std = FloatField()
    s_vert_mean_min_diff_rel = FloatField()
    s_vert_mean_cycle_max = ListField(FloatField())
    s_vert_mean_max = FloatField()
    s_vert_mean_max_std = FloatField()
    s_vert_mean_max_diff_rel = FloatField()
    s_vert_mean_cycle_mean = ListField(FloatField())
    s_vert_mean_mean = FloatField()
    s_vert_mean_mean_std = FloatField()
    s_vert_mean_mean_diff_rel = FloatField()
    s_vert_mean_cycle_diff = ListField(FloatField())
    s_vert_mean_diff = FloatField()
    s_vert_mean_diff_std = FloatField()
    s_vert_mean_diff_diff_rel = FloatField()
    ## optional parameters
    ## S1: horizontal displacement sensor 1
    s_hor_1_amp = FloatField(required=False)
    s_hor_1_freq = FloatField(required=False)
    s_hor_1_phase = FloatField(required=False)
    s_hor_1_offset = FloatField(required=False)
    s_hor_1_slope = FloatField(required=False)
    s_hor_1_r2 = FloatField(required=False)
    s_hor_1_cycle_min = ListField(FloatField(), required=False)
    s_hor_1_min = FloatField(required=False)
    s_hor_1_min_std = FloatField(required=False)
    s_hor_1_min_diff_rel = FloatField(required=False)
    s_hor_1_cycle_max = ListField(FloatField(), required=False)
    s_hor_1_max = FloatField(required=False)
    s_hor_1_max_std = FloatField(required=False)
    s_hor_1_max_diff_rel = FloatField(required=False)
    s_hor_1_cycle_mean = ListField(FloatField(), required=False)
    s_hor_1_mean = FloatField(required=False)
    s_hor_1_mean_std = FloatField(required=False)
    s_hor_1_mean_diff_rel = FloatField(required=False)
    s_hor_1_cycle_diff = ListField(FloatField(), required=False)
    s_hor_1_diff = FloatField(required=False)
    s_hor_1_diff_std = FloatField(required=False)
    s_hor_1_diff_diff_rel = FloatField(required=False)
    ## S2: horizontal displacement sensor 2
    s_hor_2_amp = FloatField(required=False)
    s_hor_2_freq = FloatField(required=False)
    s_hor_2_phase = FloatField(required=False)
    s_hor_2_offset = FloatField(required=False)
    s_hor_2_slope = FloatField(required=False)
    s_hor_2_r2 = FloatField(required=False)
    s_hor_2_cycle_min = ListField(FloatField(), required=False)
    s_hor_2_min = FloatField(required=False)
    s_hor_2_min_std = FloatField(required=False)
    s_hor_2_min_diff_rel = FloatField(required=False)
    s_hor_2_cycle_max = ListField(FloatField(), required=False)
    s_hor_2_max = FloatField(required=False)
    s_hor_2_max_std = FloatField(required=False)
    s_hor_2_max_diff_rel = FloatField(required=False)
    s_hor_2_cycle_mean = ListField(FloatField(), required=False)
    s_hor_2_mean = FloatField(required=False)
    s_hor_2_mean_std = FloatField(required=False)
    s_hor_2_mean_diff_rel = FloatField(required=False)
    s_hor_2_cycle_diff = ListField(FloatField(), required=False)
    s_hor_2_diff = FloatField(required=False)
    s_hor_2_diff_std = FloatField(required=False)
    s_hor_2_diff_diff_rel = FloatField(required=False)
    ## Piston channel
    s_piston_amp = FloatField(required=False)
    s_piston_freq = FloatField(required=False)
    s_piston_phase = FloatField(required=False)
    s_piston_offset = FloatField(required=False)
    s_piston_slope = FloatField(required=False)
    s_piston_r2 = FloatField(required=False)
    s_piston_cycle_min = ListField(FloatField(), required=False)
    s_piston_min = FloatField(required=False)
    s_piston_min_std = FloatField(required=False)
    s_piston_min_diff_rel = FloatField(required=False)
    s_piston_cycle_max = ListField(FloatField(), required=False)
    s_piston_max = FloatField(required=False)
    s_piston_max_std = FloatField(required=False)
    # NOTE(review): 'dif' (not 'diff') — typo kept for DB compatibility.
    s_piston_max_dif_rel = FloatField(required=False)
    s_piston_cycle_mean = ListField(FloatField(), required=False)
    s_piston_mean = FloatField(required=False)
    s_piston_mean_std = FloatField(required=False)
    s_piston_mean_diff_rel = FloatField(required=False)
    s_piston_cycle_diff = ListField(FloatField(), required=False)
    s_piston_diff = FloatField(required=False)
    s_piston_diff_std = FloatField(required=False)
    s_piston_diff_diff_rel = FloatField(required=False)

View File

@@ -0,0 +1,64 @@
import datetime
from mongoengine import *
from .client import Client
from .enumeration import ProcessStatusEnum
from .machines import MachineBase
from .material import Material
from .project import Project
from .usermanagement import Organisation, User
from .workpackage import Workpackage
class TaskManagerBase(Document):
    """Base document for lab-test tasks.

    Links a task to its organisation, user, client, project and (optional)
    workpackage, tracks its processing status and scheduling times, and
    chains tasks into a sequence via ``step_before`` / ``step_after``.
    """
    org_id = LazyReferenceField(Organisation, required=True)
    user_id = LazyReferenceField(User, required=True)
    client_id = LazyReferenceField(Client, required=True)
    project_id = LazyReferenceField(Project, required=True)
    wp_id = LazyReferenceField(Workpackage, required=False)
    # overall processing state of the task
    status = EnumField(ProcessStatusEnum, default=ProcessStatusEnum.ONGOING)
    # fix: key was "r ender_kw" (stray space) — WTForms expects "render_kw",
    # as used by the other documents in this package
    task_added = DateTimeField(default=datetime.datetime.now,
                               wtf_options={"render_kw": {
                                   "step": "60"
                               }})
    task_finished = DateTimeField(required=False)
    assign_users = ListField(LazyReferenceField(User), required=False)
    assign_machine = LazyReferenceField(MachineBase, required=False)
    series = StringField(default='Serie 01')
    # previous/next task in a processing chain (self references)
    step_before = LazyReferenceField('self', required=False)
    step_after = LazyReferenceField('self', required=False)
    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'taskmanager',
        "db_alias": 'dblabtests',
    }
class TaskCITTStiffness(TaskManagerBase):
    """Task for a cyclic indirect tensile test (CITT) stiffness test."""
    # specimen material under test
    material = LazyReferenceField(Material, required=True)
class TaskCITTFatigue(TaskManagerBase):
    """Task for a cyclic indirect tensile test (CITT) fatigue test."""
    # specimen material under test
    material = LazyReferenceField(Material, required=True)
class TaskDynShearStiffness(TaskManagerBase):
    """Task for a dynamic shear stiffness test.

    References two materials plus a bonding material — presumably the two
    layers of a layered specimen and the bonding agent between them
    (TODO confirm against the test implementation).
    """
    material = LazyReferenceField(Material, required=True)
    material2 = LazyReferenceField(Material, required=True)
    bounding = LazyReferenceField(Material, required=True)
View File

@@ -0,0 +1,65 @@
import datetime
from mongoengine import *
from .enumeration import ModelSelection
class Organisation(Document):
    """Organisation (tenant) using the platform.

    Stores display names, the login domain and which module set
    (``ModelSelection``) is enabled for the organisation.
    """
    name = StringField(required=True)
    name_short = StringField(required=True)
    # creation timestamp; WTForms renders the widget with minute steps
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    # organisation-specific lab-test selection — presumably names of the
    # implementations to use; TODO confirm against consumers
    labtest_citt = StringField(required=False)
    labtest_shear_extension = StringField(required=False)
    domain = StringField(required=True)
    # enabled module set; defaults to the base module
    modul = EnumField(ModelSelection, required=True, default=ModelSelection.BASE)
    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'organisation',
        'db_alias': 'dblabtests',
    }
class User(Document):
    """Application user.

    The primary key is a UUID supplied externally — presumably by the
    identity provider (Keycloak per the commit history); confirm.
    """
    # external UUID primary key, stored in binary form
    _id = UUIDField(binary=True, primary_key=True)
    # owning organisation; deleting it cascades to its users
    org_id = LazyReferenceField(Organisation,
                                required=True,
                                reverse_delete_rule=CASCADE)
    date_added = DateTimeField(default=datetime.datetime.now,
                               wtf_options={"render_kw": {
                                   "step": "60"
                               }})
    email = EmailField(required=True)
    # last login timestamp; defaults to "now" even before a first login
    last_login = DateTimeField(default=datetime.datetime.now,
                               required=False,
                               wtf_options={"render_kw": {
                                   "step": "60"
                               }})
    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'user',
        'db_alias': 'dblabtests',
    }

View File

@@ -0,0 +1,34 @@
import datetime
from mongoengine import *
from .project import Project
from .usermanagement import User
class Workpackage(Document):
    """Work package inside a project, used to group lab-test tasks."""
    name = StringField(required=True)
    name_short = StringField(required=False)
    # external work-package identifier (free-form string)
    wp_id = StringField(required=True)
    project_id = LazyReferenceField(Project, required=True)
    # optional owner; reference is kept when the user is deleted
    user_id = LazyReferenceField(User,
                                 required=False,
                                 reverse_delete_rule=DO_NOTHING)
    date = DateTimeField(default=datetime.datetime.now,
                         wtf_options={"render_kw": {
                             "step": "60"
                         }})
    meta = {
        'allow_inheritance': True,
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'collection': 'workpackages',
        "db_alias": 'dblabtests',
    }

View File

@@ -0,0 +1 @@
from .citt import *

103
src/paveit/functions/citt.py Executable file
View File

@@ -0,0 +1,103 @@
import numpy as np
def stiffness_tp26(T, f, Emax, Emin, phi, z0, z1, T0=20.0):
    """Sigmoidal stiffness master-curve model (TP Asphalt, part 26).

    Shifts the load frequency ``f`` at temperature ``T`` to the reference
    temperature ``T0`` with an Arrhenius-type factor and evaluates a
    sigmoid between ``Emin`` and ``Emax`` on the log10 frequency axis.
    """
    # Arrhenius temperature shift factor (temperatures in Kelvin)
    shift = np.exp(phi * ((1 / (T + 273.15)) - (1 / (T0 + 273.15))))
    # reduced frequency on a decadic log axis
    log_reduced_freq = np.log(f * shift) / np.log(10)
    denominator = 1 + np.exp(z0 * log_reduced_freq + z1)
    return Emin + (Emax - Emin) / denominator
def calc_nu(T):
    """Estimate the temperature-dependent Poisson ratio.

    ``T`` is in degrees Celsius; the inner term converts to Fahrenheit.
    NOTE(review): the original carried a TODO to verify this formula.
    """
    fahrenheit = 9 / 5 * T + 32
    return 0.15 + (0.35) / (1 + np.exp(3.1849 - 0.04233 * fahrenheit))
def calc_E(data, metadata, columns_analyse):
    """Cosine-fit the channels of one CITT data chunk and derive stiffness,
    elastic strain and phase lag per TP Asphalt 26.

    :param data: DataFrame of one test block; time index and columns
        'f', 'sigma', 'T', 'N' plus the channels in *columns_analyse*
        (must include 'F' and 's_hor_sum' for the stiffness part)
    :param metadata: dict with at least 'speciment_height' and
        'speciment_diameter'
    :param columns_analyse: channel names to fit
    :return: dict of fit parameters, per-cycle statistics and derived values
    """
    # NOTE(review): fit_cos is not imported in this module as shown —
    # presumably provided by paveit.analysis; verify the import.
    # time axis relative to the chunk start
    data.index = data.index - data.index[0]
    res_temp = {}
    x = data.index.values
    freq = np.round(float(data['f'].unique()), 2)
    sigma = float(data['sigma'].unique())
    temperature = float(data['T'].unique())
    for idxcol, col in enumerate(columns_analyse):
        if not col in data.columns: continue
        y = data[col].values
        # sinusoidal regression of the whole channel
        res = fit_cos(x, y, freq=freq)
        for key, value in res.items():
            res_temp[f'fit_{col}_{key}'] = value
        # analyse cycle data: min/max/mean/peak-to-peak per load cycle N
        cycle_min = []
        cycle_max = []
        cycle_mean = []
        cycle_diff = []
        for N, data_cycle in data.groupby('N'):
            y = data_cycle[col].values
            cycle_min.append(y.min())
            cycle_max.append(y.max())
            cycle_mean.append(y.mean())
            cycle_diff.append(cycle_max[-1] - cycle_min[-1])
        # aggregate per-cycle statistics: mean, std, relative spread
        res_temp[f'fit_{col}_cycle_min'] = cycle_min
        res_temp[f'fit_{col}_min'] = np.mean(cycle_min)
        res_temp[f'fit_{col}_min_std'] = np.std(cycle_min)
        res_temp[f'fit_{col}_min_diff_rel'] = (np.max(cycle_min) - np.min(cycle_min))/np.mean(cycle_min)
        res_temp[f'fit_{col}_cycle_max'] = cycle_max
        res_temp[f'fit_{col}_max'] = np.mean(cycle_max)
        res_temp[f'fit_{col}_max_std'] = np.std(cycle_max)
        res_temp[f'fit_{col}_max_diff_rel'] = (np.max(cycle_max) - np.min(cycle_max))/np.mean(cycle_max)
        res_temp[f'fit_{col}_cycle_mean'] = cycle_mean
        res_temp[f'fit_{col}_mean'] = np.mean(cycle_mean)
        res_temp[f'fit_{col}_mean_std'] = np.std(cycle_mean)
        res_temp[f'fit_{col}_mean_diff_rel'] = (np.max(cycle_mean) - np.min(cycle_mean))/np.mean(cycle_mean)
        res_temp[f'fit_{col}_cycle_diff'] = cycle_diff
        res_temp[f'fit_{col}_diff'] = np.mean(cycle_diff)
        res_temp[f'fit_{col}_diff_std'] = np.std(cycle_diff)
        res_temp[f'fit_{col}_diff_diff_rel'] = (np.max(cycle_diff) - np.min(cycle_diff))/np.mean(cycle_diff)
    # add more metadata
    res_temp['f_set'] = freq
    res_temp['sigma_set'] = sigma
    res_temp['T_set'] = temperature
    res_temp['N_from'] = data['N'].min()
    res_temp['N_to'] = data['N'].max()
    res_temp['n_samples_per_cycle'] = int(
        len(data) / (res_temp['N_to'] - res_temp['N_from'] + 1))
    ## Stiffness
    deltaF = res_temp['fit_F_amp']
    deltaU = res_temp['fit_s_hor_sum_amp']
    h = float(metadata['speciment_height'])
    d = float(metadata['speciment_diameter'])
    nu = calc_nu(temperature)
    res_temp['nu'] = nu
    # per TP Asphalt 26
    res_temp['stiffness'] = deltaF /(h * deltaU) * (4.0/np.pi -1 + nu)
    ## elastic horizontal strain
    res_temp['el_strains'] = 2*2*deltaU/d * (1+3*nu)/(4 + np.pi*nu - np.pi) * 1000.0 # factor 2*2 because deltaU is the amplitude only, not the full deflection
    # TODO: rework and extend (ISSUE #2)
    res_temp['phase'] = res_temp['fit_F_phase'] - res_temp['fit_s_hor_sum_phase']
    return res_temp

10
src/paveit/helper/__init__.py Normal file → Executable file
View File

@@ -1,6 +1,10 @@
from .filehandling import read_file_to_bytesio
from .filehasher import calc_hash_of_bytes from .filehasher import calc_hash_of_bytes
from .minio import get_minio_client_archive, get_minio_client_processing from .minio import get_minio_client_archive, get_minio_client_processing
from .mongo import connect_mongo_db, fetch_recursive, mongo_get_results, mongo_to_dict
__all__ = ['get_minio_client_archive', 'get_minio_client_processing', __all__ = [
'calc_hash_of_bytes' 'read_file_to_bytesio', 'connect_mongo_db', 'mongo_get_results', 'fetch_recursive', 'mongo_to_dict',
] 'get_minio_client_processing', 'get_minio_client_archive',
'calc_hash_of_bytes'
]

View File

@@ -0,0 +1,12 @@
import logging
from io import BytesIO
logger = logging.getLogger(__name__)
def read_file_to_bytesio(filename: str):
    """Load the file at *filename* fully into memory as a BytesIO buffer."""
    with open(filename, "rb") as fh:
        raw = fh.read()
    return BytesIO(raw)

0
src/paveit/helper/filehasher.py Normal file → Executable file
View File

4
src/paveit/helper/minio.py Normal file → Executable file
View File

@@ -9,7 +9,7 @@ def get_minio_client_processing(bucket_name = 'processing'):
os.environ["MINIO_URL"], os.environ["MINIO_URL"],
access_key=os.environ["MINIO_ACCESS_KEY"], access_key=os.environ["MINIO_ACCESS_KEY"],
secret_key=os.environ["MINIO_SECRET_KEY"], secret_key=os.environ["MINIO_SECRET_KEY"],
secure=False secure=True
) )
@@ -28,7 +28,7 @@ def get_minio_client_archive(bucket_name = 'archive'):
os.environ["MINIO_ARCHIVE_URL"], os.environ["MINIO_ARCHIVE_URL"],
access_key=os.environ["MINIO_ARCHIVE_ACCESS_KEY"], access_key=os.environ["MINIO_ARCHIVE_ACCESS_KEY"],
secret_key=os.environ["MINIO_ARCHIVE_SECRET_KEY"], secret_key=os.environ["MINIO_ARCHIVE_SECRET_KEY"],
secure=False secure=True
) )
found = client.bucket_exists(bucket_name) found = client.bucket_exists(bucket_name)

119
src/paveit/helper/mongo.py Executable file
View File

@@ -0,0 +1,119 @@
import os
import mongoengine
from bson import ObjectId
from mongoengine import connect as mongo_connect
from pandas import DataFrame
def connect_mongo_db(username=None,
                     password=None,
                     host=None,
                     dbname=None):
    """Open the 'dblabtests' mongoengine connection.

    Falls back to the MONGO_USER / MONGO_PASSWD / MONGO_URI / MONGO_DB
    environment variables for any argument not given.

    Fix: the environment was previously read in the *default arguments*,
    i.e. at import time — importing the module raised KeyError when a
    variable was unset, and later changes to the environment were ignored.
    The lookup is now lazy, and the connection object is returned.

    :return: the connection created by ``mongoengine.connect``
    """
    username = os.environ['MONGO_USER'] if username is None else username
    password = os.environ['MONGO_PASSWD'] if password is None else password
    host = os.environ['MONGO_URI'] if host is None else host
    dbname = os.environ['MONGO_DB'] if dbname is None else dbname
    return mongo_connect(dbname,
                         username=username,
                         password=password,
                         host=host,
                         authentication_source='admin',
                         alias='dblabtests')
def mongo_upload_results(resultsmodel, results: DataFrame, datamodel,
                         data: DataFrame, filehash: str, org_id: ObjectId,
                         project_id: ObjectId, material_id: ObjectId,
                         user_id: ObjectId):
    """Store one result document plus its raw-data document per result row.

    Each row of *results* becomes a *resultsmodel* document; the matching
    entry of *data* becomes a *datamodel* document referencing the result.

    NOTE(review): assumes ``data`` is indexable by the row index of
    ``results`` (e.g. a list of DataFrames, one per result row) — confirm
    against callers; the annotation says DataFrame.
    """
    for idx, res in results.iterrows():
        # fix: ``meta`` was used without ever being initialised (NameError
        # on the first call); build the shared reference fields per row
        meta = {
            'filehash': filehash,
            'org_id': org_id,
            'project_id': project_id,
            'material': material_id,
            'user_id': user_id,
        }
        #check if result in db
        #n = CITTSiffness.objects(**meta).count()
        # write the result document
        data_dict = res.to_dict()
        data_dict.update(meta)
        f = resultsmodel(**data_dict).save()
        # upload the raw data belonging to this result row
        # fix: ``idx_fit`` was undefined — use the results row index
        data_sel = data[idx]
        # required data
        data_out = dict(
            time=data_sel.index,
            F=list(data_sel['F']),
            N=list(data_sel['N']),
            s_hor_1=list(data_sel['s_hor_1']),
            s_hor_2=list(data_sel['s_hor_2']),
            s_hor_sum=list(data_sel['s_hor_sum']),
        )
        #optional data
        for col in ['S_piston']:
            if col in data_sel.columns:
                data_out[col] = data_sel[col]
        g = datamodel(result=f.id, **data_out).save()
def mongo_get_results(resultsmodel, results: DataFrame, datamodel,
                      data: DataFrame, filehash: str, org_id: ObjectId,
                      project_id: ObjectId, material_id: ObjectId,
                      user_id: ObjectId):
    """Placeholder: mirrors the signature of ``mongo_upload_results`` but
    performs no query yet and always reports success (``True``)."""
    return True
def fetch_recursive(data, fetch_parameter=['norm', 'limits', 'assign_machine']):
    """Serialise a mongoengine document to a dict, inlining referenced docs.

    For each field name in *fetch_parameter* the referenced document is
    fetched (LazyReferenceField) or read directly and embedded as a nested
    dict instead of its raw reference.

    :param data: mongoengine Document instance
    :param fetch_parameter: field names whose references should be inlined
    :return: dict representation with the listed references expanded
    """
    fields = data._fields
    data_out = data.to_mongo().to_dict()
    for par in fetch_parameter:
        if par not in fields.keys():
            continue
        try:
            # LazyReferenceField values must be fetched explicitly
            if isinstance(fields[par], mongoengine.fields.LazyReferenceField):
                d = data[par].fetch()
            else:
                d = data[par]
        except Exception:
            # fix: bare ``except`` also swallowed SystemExit/KeyboardInterrupt;
            # a failed fetch (e.g. dangling reference) still skips the field
            continue
        if d is None:
            continue
        data_out[par] = d.to_mongo().to_dict()
    return data_out
def mongo_to_dict(data, drop_parameters=['_cls','user_id', 'org_id', 'project_id']):
    """Recursively clean a mongo document dict for output.

    Removes the keys in *drop_parameters* at every nesting level and
    converts ObjectId values (scalar or inside lists) to strings.
    The dict is modified in place and also returned.

    :param data: dict (e.g. from ``Document.to_mongo().to_dict()``)
    :param drop_parameters: keys to strip at every level
    :return: the cleaned dict (same object as *data*)
    """
    for key in list(data.keys()):
        if key in drop_parameters:
            del data[key]  # remove the unwanted key
        elif isinstance(data[key], dict):
            # fix: ``drop_parameters`` was not forwarded here, so a custom
            # drop list was silently ignored in nested dicts (the list
            # branch below already forwarded it)
            mongo_to_dict(data[key], drop_parameters)
        elif isinstance(data[key], list):  # process lists element-wise
            for i, item in enumerate(data[key]):
                if isinstance(item, ObjectId):
                    data[key][i] = str(item)
                elif isinstance(item, dict):
                    mongo_to_dict(item, drop_parameters)
        else:
            # scalar values: stringify ObjectIds
            if isinstance(data[key], ObjectId):
                data[key] = str(data[key])
    return data

3
src/paveit/io/__init__.py Executable file
View File

@@ -0,0 +1,3 @@
from .geosys import read_geosys
__all__ = ["read_geosys"]

249
src/paveit/io/geosys.py Executable file
View File

@@ -0,0 +1,249 @@
import csv
import os
from io import BytesIO
from sys import getsizeof
from numpy import array
from pandas import DataFrame
def detect_tabnum(filename, tabstr, encoding='utf-8'):
    """Scan the first rows of a tab-separated file for *tabstr*.

    Returns the second cell of the first row that contains *tabstr* in any
    cell AND has a literal 'plain' cell (comparison is case-insensitive);
    returns ``False`` once 101 rows were scanned without success, and
    ``None`` if the file ends before that.
    """
    path = os.path.normpath(filename)
    needle = tabstr.lower()
    with open(path, 'r', encoding=encoding) as handle:
        for row_no, raw_row in enumerate(csv.reader(handle, delimiter='\t')):
            cells = [cell.lower() for cell in raw_row]
            if any(needle in cell for cell in cells) and 'plain' in cells:
                return cells[1]
            # give up after 101 rows (row_no is zero-based)
            if row_no >= 100:
                return False
def str2float(value):
    """Parse a decimal-comma number (e.g. '3,14') to float.

    Accepts the dot-decimal form as well and returns ``None`` for anything
    that cannot be parsed (including non-string input).

    Fixes: the parameter no longer shadows the ``str`` builtin, and the
    bare ``except`` is narrowed to the errors the conversion can raise.
    """
    try:
        return float(value.replace(',', '.'))
    except (AttributeError, ValueError, TypeError):
        return None
def read_geosys(buffer: BytesIO,
                table,
                pkdata='001',
                metadata_ids=['003', '015'],
                encoding='utf-8',
                to_si=False,
                debug=False):
    '''
    Read a GeoSys export (tab-separated text) into a header dict and a
    pandas DataFrame of measurements.

    :param buffer: Bytes IO Object
    :param table: Table-Number (3-digit string) holding the measurements
    :param pkdata: Table-Number of speciment definitions, default: 1
    :param metadata_ids: table numbers of additional metadata blocks
    :param encoding: Encoding, default: utf-8
    :param to_si: unused — TODO confirm the intended unit conversion
    :param debug: debug-mode
    :return: tuple (header: dict, data: pandas.DataFrame)
    '''
    try:
        # NOTE(review): dictOut is written but never read — dead code
        dictOut = {}
        dictOut['durch'] = 0
        dictOut['hoehe'] = 0
        #---------------------------------------------------------------------
        # read and convert the raw data
        #---------------------------------------------------------------------
        # keep only lines whose leading 3-digit table id matches one of the
        # requested tables (measurements, specimen definitions, metadata)
        buffer.seek(0)
        lines = buffer.readlines()
        data = []
        for line in lines:
            try:
                line = line.decode(encoding)
                line = line.split('\t')
                if len(line) > 2:
                    v = line[0][0:3]
                    if len(v) == 3:
                        if (table == v) or (pkdata == v) or (v in metadata_ids):
                            data.append(line)
            except:
                pass
        if debug:
            print('Anz. Datensätze: ', str(len(data)), getsizeof(data))
        # clean up
        ## build the target structure
        data_processed = {}
        data_processed['head'] = []
        data_processed['metadata'] = {}
        data_processed['data'] = []
        for i in metadata_ids:
            data_processed['metadata'][i] = []
        for idx, d in enumerate(data):
            try:
                v = d[0][0:3]
                # NOTE(review): `v in pkdata` / `v in table` are substring
                # tests on strings, not equality — fine for exact 3-digit
                # ids but would mis-match longer table strings; confirm
                if v in pkdata: data_processed['head'].append(d)
                if v in metadata_ids: data_processed['metadata'][v].append(d)
                if v in table: data_processed['data'].append(d)
            except:
                pass
        # replace object
        data = data_processed
        assert len(data['data']) != 0
        if debug:
            print('data_clean fin')
        ## prepare the header (specimen geometry)
        # row 0 = column names, row 1 = units, row 2 = values
        for idx, row in enumerate(data['head']):
            if idx == 0:
                id_durchmesser = None
                id_hoehe = None
                id_name = None
                for idx_name, name in enumerate(row):
                    name_lower = name.lower()
                    if any(map(name_lower.__contains__, ['durchmesser'])):
                        id_durchmesser = idx_name
                    elif any(map(name_lower.__contains__, ['bezeichnung'])):
                        id_name = idx_name
                    elif any(map(name_lower.__contains__, ['höhe'])):
                        id_hoehe = idx_name
                if debug:
                    print(id_durchmesser, id_hoehe, id_name)
            elif idx == 1:
                unit_durch = None
                unit_hoehe = None
                try:
                    unit_durch = row[id_durchmesser]
                    unit_hoehe = row[id_hoehe]
                except:
                    pass
            elif idx == 2:
                durchmesser = None
                hoehe = None
                name = None
                try:
                    durchmesser = str2float(row[id_durchmesser])
                    hoehe = str2float(row[id_hoehe])
                    name = row[id_name]
                except:
                    pass
        # NOTE(review): if data['head'] has fewer than 3 rows, the locals
        # used below are unbound and this raises — confirm inputs always
        # carry a complete specimen block
        header = {
            'speciment_diameter': durchmesser,
            'speciment_height': hoehe,
            'name': name,
            'unit_h': unit_hoehe,
            'unit_d': unit_durch
        }
        # merge the metadata tables into the header (key row + value row)
        meta = data['metadata']
        for key in meta.keys():
            sel = meta[key]
            assert len(sel[0]) == len(sel[2])
            if len(sel) <= 3:
                d = { sel[0][i]: sel[2][i].strip() for i in range(len(sel[0])) }
            # fix: GeoSys exports the parameter "Oberspannung" twice; take
            # the later row so the first occurrence is dropped
            else:
                d = { sel[0][i]: sel[3][i].strip() for i in range(len(sel[0])) }
            header_append = d
            header.update(header_append)
        # frequency fix: should be corrected in the TUD input data instead
        try:
            l = 'Versuchsart\r\n'
            header['Frequenz'] = float(header[l].split('Hz')[0].split('Steifigkeit')[1].strip().replace(',','.'))
        except:
            pass
        if debug:
            print('header\n', header)
        # add metadata to header
        ## convert the measurement rows to a pandas DataFrame
        # row 0 = column names, row 1 = units, rest = values; the first two
        # columns (table id, record id) are skipped throughout
        if debug:
            print('daten umwandel')
        temp = []
        for idx, row in enumerate(data['data']):
            if idx == 0:
                if debug:
                    print('convert head')
                data_head = []
                for idx_name, name in enumerate(row):
                    if idx_name <= 1: continue
                    data_head.append(name)
            elif idx == 1:
                data_units = []
                for idx_name, name in enumerate(row):
                    if idx_name <= 1: continue
                    data_units.append(name)
            else:
                t = []
                for idx_col, value in enumerate(row):
                    if idx_col <= 1:
                        continue
                    else:
                        t.append(str2float(value))
                temp.append(t)
        data = array(temp)
        if debug:
            print(data_head, data_units)
        ## normalise the column names
        # build the pandas DataFrame
        data = DataFrame(data=data, columns=data_head)
        if debug:
            print(data.head())
        return header, data
    except:
        print('Fehler beim lesen')
        raise

3
src/paveit/labtest/__init__.py Normal file → Executable file
View File

@@ -1,5 +1,8 @@
from .base import DataSineLoad from .base import DataSineLoad
from .citt import *
from .citt import CITTBase from .citt import CITTBase
from .citt_fatigue import *
from .dsv import *
__all__ = ['DataSineLoad', __all__ = ['DataSineLoad',
'CITTBase' 'CITTBase'

446
src/paveit/labtest/base.py Normal file → Executable file
View File

@@ -1,10 +1,13 @@
# coding: utf-8 # coding: utf-8
import io import io
import logging
import numpy as np
import pandas as pd import pandas as pd
from paveit.helper import calc_hash_of_bytes, get_minio_client_processing
from worker import app, logger from paveit.analysis import fit_cos
from paveit.functions import calc_nu
from paveit.helper import calc_hash_of_bytes, get_minio_client_processing
class DataSineLoad(): class DataSineLoad():
@@ -12,79 +15,412 @@ class DataSineLoad():
Base class for lab tests with sine load Base class for lab tests with sine load
""" """
def __init__(self, filename:str , metadata: dict): def __init__(self,
filename: str,
metadata: dict,
logger=None,
debug: bool = False,
data: None | io.BytesIO = None):
self.filename = filename self.filename = filename
self.metadata = metadata self.metadata = metadata
self._logger = logger if isinstance(data, io.BytesIO):
self.data = data
self._logger.info(f'filename s3: {self.filename}, metadata: {self.metadata}')
self.debug = debug
if logger == None:
self._logger = logging.getLogger(__name__)
else:
self._logger = logger
self._logger.info(
f'filename s3: {self.filename}, metadata: {self.metadata}')
self._pre_run()
def _set_parameter(self):
self._logger.debug('run _set_parameter')
self.split_data_based_on_parameter = ['T', 'sigma', 'f']
self.col_as_int = ['N']
self.col_as_float = [
'T', 'F', 's_piston', 's_hor_1', 'f', 's_hor_1', 's_hor_2'
]
self.val_col_names = [
'time', 'T', 'f', 'sigma', 'N', 'F', 's_hor_1', 's_hor_2', 's_piston'
]
self.columns_analyse = [
'F', 's_hor_sum', 's_hor_1', 's_hor_2', 's_piston'
]
self.round_values = [('T', 3)]
# Header names after standardization; check if exists
self.val_header_names = ['speciment_height', 'speciment_diameter']
self.number_of_load_cycles_for_analysis = 5
self.meta_names_of_parameter = {
'sigma': ['Max. Spannung']
} #list of names
self.data_column_names = {
'time': ['Time Series'],
'F': ['Load Series'],
's_hor_1': ['LVDT1 Series'],
's_hor_2': ['LVDT2 Series'],
}
def update_parameter():
""" update standard prameter from function self._set_parameter()"""
pass
def _define_units(self):
self.unit_s = 1 #mm
self.unit_F = 1 #N
self.unit_t = 1 / 1000. #s
def _connect_to_s3(self): def _connect_to_s3(self):
self._logger.info('connect to db') self._logger.debug('run _connect to db')
self.__minioClient = get_minio_client_processing() self.__minioClient = get_minio_client_processing()
def _read_from_s3_to_bytesio(self): def _read_from_s3_to_bytesio(self):
self._logger.info('read bytes') self._logger.debug('run _read bytes')
try: try:
self._connect_to_s3() self._connect_to_s3()
response = self.__minioClient.get_object('processing', self.filename) response = self.__minioClient.get_object('processing',
self.data = response.data self.filename)
self.data = response.data
finally: finally:
response.close() response.close()
response.release_conn() response.release_conn()
self.data = io.BytesIO(self.data) self.data = io.BytesIO(self.data)
self._logger.debug('read data from s3')
def _calc_hash_of_bytesio(self): def _calc_hash_of_bytesio(self):
self._logger.debug('run _calc_hash_of_bytesio')
self.filehash = calc_hash_of_bytes(self.data) self.filehash = calc_hash_of_bytes(self.data)
self.data.seek(0) self.data.seek(0)
self._logger.debug(f'Hash of file: {self.filehash}') self._logger.debug(f'Hash of file: {self.filehash}')
def _define_data_models(self):
pass
def _data_in_db(self):
nsamples = self._datamodel.objects(filehash = self.filehash).count()
if nsamples>0:
self.file_in_db = True
else:
self.file_in_db = False
def _process_data(self):
""" convert self.data (BytesIO) to pandas.DataFrame, update
self.metadata with informations from file """
def _bytes_to_df(self):
self._logger.debug('convert bytes to pandas.DataFrame') self._logger.debug('convert bytes to pandas.DataFrame')
encoding = 'utf-8'
self.data = pd.read_csv(self.data, encoding=encoding)
def _meta_to_float(self):
for key, d in self.metadata.items():
try:
#remove units
for unit in ["°C", 'Hz']:
if unit in d:
d = d.split(unit)[0].strip()
f = float(d.replace(',', '.'))
self.metadata[key] = f
except:
pass
def _standardize_data(self):
self._logger.debug('run _standardize_data')
colnames = list(self.data.columns)
for par, names in self.data_column_names.items():
for name in names:
colnames = [sub.replace(name, par) for sub in colnames]
self.data.columns = colnames
self._logger.debug(f'columns: {colnames}')
print(self.data.head())
self._logger.debug(f'standardize_data: {self.data.columns}')
def _standardize_meta(self):
self._logger.debug('run _standardize_meta')
# remove "\r\n" ending from Windows and whitespace
for col in list(self.metadata.keys()):
col_mod = col.replace('\r\n', '')
col_mod = col_mod.strip()
if col != col_mod:
self.metadata[col_mod] = self.metadata[col]
self.metadata.pop(col)
for par, names in self.meta_names_of_parameter.items():
for name in names:
if name in self.metadata:
self.metadata[par] = self.metadata[name]
self.metadata.pop(name)
break
encoding='utf-8' # stip data
self.df = pd.read_csv(self.data, encoding=encoding) for key in self.metadata.keys():
try:
self.metadata[key] = self.metadata[key].strip()
except:
pass
self._logger.debug(f'meta (stand.): {self.metadata}')
def _calc(self): def _modify_meta(self):
self._logger.debug('calc data') pass
return self.df.mean().mean()
def _validate_data(self):
def _archive_binary_data(self): self._logger.debug('run _validate_data')
self._logger.debug('send file to archive') for name in self.val_col_names:
app.send_task('ArchiveFile', args=[self.filename, if not name in self.data.columns:
self.metadata,
self.filehash, # check if value in metadata:
'org', if name in self.metadata.keys():
'citt' self._logger.error(f'add {name} from metadata to data')
], self.data[name] = self.metadata[name]
queue='archive'
) else:
self._logger.error(f'{name} not in data')
raise
self._logger.debug(f'validate_data: {self.data.columns}')
def _validate_meta(self):
self._logger.debug('run _validate_meta')
for name in self.val_header_names:
if not name in self.metadata:
self._logger.error(f'{name} not found')
raise
def _post_string_to_float(self):
sel = self.data.select_dtypes(include=['object'])
if sel.empty:
return
for col in sel.columns:
try:
self.data[col] = pd.to_numeric(self.data[col].str.replace(
',', '.'))
except:
pass
def _post_apply_units(self):
for col in [
's_hor_sum', 's_hor_1', 's_hor_2', 's_vert_sum', 's_vert_1',
's_vert_2', 's_piston', 'extension',
]:
if col in self.data.columns:
self.data[col] = self.data[col].mul(self.unit_s)
for col in ['F']:
self.data[col] = self.data[col].mul(self.unit_F)
for col in ['time']:
self.data[col] = self.data[col].mul(self.unit_t)
try:
self.data['f'] = self.data['f'].mul(self.unit_freq)
except:
pass
return True
def _post_round_values(self):
for par, digits in self.round_values:
if par in self.data.columns:
self.data[par] = self.data[par].round(digits)
def _post_select_importent_columns(self):
# TODO: add more columns, check datamodel
self.data = self.data[self.val_col_names]
def _post_calc_missiong_values(self):
cols = self.data.columns
if not 's_hor_sum' in cols:
if ('s_hor_1' in self.data.columns) & ('s_hor_2'
in self.data.columns):
self.data['s_hor_sum'] = self.data[['s_hor_1',
's_hor_2']].sum(axis=1)
if not 's_vert_sum' in cols:
if ('s_vert_1' in self.data.columns) & ('s_vert_2'
in self.data.columns):
self.data['s_vert_sum'] = self.data[['s_vert_1',
's_vert_2']].sum(axis=1)
def _post_opt_data(self):
#set dtypes:
for col in self.col_as_int:
self.data[col] = self.data[col].astype('int')
for col in self.col_as_float:
try:
self.data[col] = self.data[col].astype('float')
except:
pass
#set index
self.data = self.data.set_index('time')
return True
def _fit_split_data(self):
self._logger.debug('run _fit_split_data')
data_gp = self.data.groupby(self.split_data_based_on_parameter)
data_list = []
for idx, d in data_gp:
if d.empty: continue
if any(d['f'] <= 0.0): continue
#reset N
d['N'] = d['N'] - d['N'].iloc[0] + 1
idx_diff = np.diff(d.index)
dt_mean = idx_diff.mean()
gaps = idx_diff > (4 * dt_mean)
has_gaps = any(gaps)
if has_gaps == False:
data_list.append(d)
else:
#FIX: GAP FINDING
data_list.append(d)
"""
print('has gaps')
print(gaps)
idx_gaps = (np.where(gaps)[0] - 1)[0]
print(idx_gaps)
data_list.append(d.iloc[0:idx_gaps])
"""
#add self.
if len(data_list) == 0:
self.num_tests = 0
self.data = data_list[0]
else:
self.num_tests = len(data_list)
self.data = data_list
#break
nchunks = len(self.data)
self._logger.debug(f'data splited in {nchunks} chunks')
def _fit_select_data(self):
"""
select N load cycles from original data
(a): Based on window of TP-Asphalt
(b) last N cycles
DUMMY FUNCTION
"""
pass
def _calc(self):
"""
Calculate Results
DUMMY FUNCTION
"""
self._logger.info('run _calc base')
print('run BASE')
def save(self):
'''
save results to database
DUMMY FUNCTION
'''
pass
def _pre_run(self):
if not hasattr(self, 'data'):
self._read_from_s3_to_bytesio()
self._calc_hash_of_bytesio()
self._define_data_models()
#self._data_in_db()
self._set_parameter()
self.update_parameter()
self._define_units()
def run(self): def run(self):
self._logger.info('run task') self._logger.info('run task')
self._read_from_s3_to_bytesio()
self._calc_hash_of_bytesio() self._process_data()
self._meta_to_float()
self._bytes_to_df()
self._standardize_meta()
res = self._calc() self._standardize_data()
self._logger.debug(f'results: {res}') self._modify_meta()
self._validate_meta()
self._archive_binary_data() self._validate_data()
return res self._post_string_to_float()
self._post_select_importent_columns()
self._post_apply_units()
self._post_round_values()
self._post_calc_missiong_values()
self._post_opt_data()
self._fit_split_data()
self._fit_select_data()
self._calc()
#self._logger.info(f'results: {self.fit['E']}')

1159
src/paveit/labtest/citt.py Normal file → Executable file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,91 @@
import numpy as np
import pandas as pd
from paveit.functions.citt import calc_E
class CittAnalyseFatigue():
    """Mixin implementing the analysis steps of a CITT fatigue test.

    Expects the host class to provide ``self.data`` (DataFrame with a
    cycle column 'N') and ``self.metadata`` (specimen geometry).
    """
    def _fit_split_data(self):
        # Split the recording into intervals at gaps in the cycle counter N
        # and keep only 5 cycles per interval for fitting.
        data_exp = []
        N = self.data['N'].unique()
        N = np.array(N)
        # first cycle number after each gap (jump > 1 in N)
        gaps = N[1:][np.diff(N)>1]
        for i,gap in enumerate(gaps):
            print(i, gap)
            if i == 0:
                f = self.data['N']<gap
            elif i == len(gaps):
                # NOTE(review): unreachable — enumerate yields i up to
                # len(gaps)-1, so the data after the last gap is never
                # selected; confirm intended behaviour
                f = self.data['N']>=gap
            else:
                f = (self.data['N']>=gaps[i-1]) & (self.data['N']<gap)
            # filter data by geps
            d = self.data[f]
            # get 5 cycles
            if i == 0:
                # first interval: the window around cycle 100 (98..102)
                f = (d['N']>=98) & (d['N']<=102)
            else:
                # later intervals: the last 5 recorded cycles
                Nsel = d['N'].unique()
                f = (d['N']>=Nsel[-5]) & (d['N']<=Nsel[-1])
            d = d[f]
            data_exp.append(d)
        self.data = data_exp
    def _fit_select_data(self):
        ''' analyse data
        '''
        pass
    def _calc(self):
        # Fit every recording interval and derive the energy-ratio curve.
        print('calc fatigue')
        print(self.metadata)
        fit = []
        # per recording interval
        for i, d in enumerate(self.data):
            try:
                res = calc_E(d, metadata=self.metadata, columns_analyse=['F', 's_hor_sum'])
                res['idx'] = i
                # energy ratio: stiffness times the interval's mid cycle
                res['energy_ratio'] = res['stiffness']*np.round(res['N_from'] + (res['N_to'] - res['N_from'])/2, 0)
                fit.append(res)
            except:
                raise
        self.fit_single_results = pd.DataFrame.from_records(fit)
        EN_max = self.fit_single_results['energy_ratio'].max()
        # 4th-order polynomial through the points near the maximum
        # (within 80%..120% of the peak energy ratio)
        sel_f = self.fit_single_results[(self.fit_single_results['energy_ratio']>=0.8*EN_max) & (self.fit_single_results['energy_ratio']<=1.2*EN_max)]
        par = np.polyfit(sel_f['N_from'], sel_f['energy_ratio'], 4)
        x = np.arange(sel_f['N_from'].min(),sel_f['N_from'].max(), 1)
        y = np.polyval(par, x)
        # cycle number at the fitted energy-ratio maximum
        Nmakro = x[y.argmax()]
        # NOTE(review): result key 'epislon_elast_98' looks like a typo of
        # 'epsilon_elast_98', but renaming changes the public result dict
        self.fit = {'Nmakro': Nmakro,
                    'energy_ratio_max': y.max(),
                    'par_fit': par,
                    'epislon_elast_98': self.fit_single_results.iloc[0]['el_strains']}

393
src/paveit/labtest/dsv.py Normal file
View File

@@ -0,0 +1,393 @@
import io
import os
from csv import reader
import numpy as np
import pandas as pd
from bson import ObjectId
from paveit import calc_nu, fit_cos
from paveit.io import read_geosys
from paveit.labtest import DataSineLoad
class TP25A1base(DataSineLoad):
def _set_parameter(self):
    """Configure column names, validation lists and analysis windows for
    the TP 25-A1 evaluation (overrides the DataSineLoad defaults)."""
    self._logger.debug('run _set_parameter')
    # one test block is formed per temperature/stress/frequency combination
    self.split_data_based_on_parameter = ['T', 'sigma', 'f']
    self.col_as_int = ['N']
    # NOTE(review): 's_hor_1' is listed twice — harmless but redundant
    self.col_as_float = [
        'T', 'F', 's_piston', 's_hor_1', 'f', 's_hor_1', 's_hor_2'
    ]
    # columns that must exist after standardization
    self.val_col_names = [
        'time', 'T', 'f', 'sigma', 'N', 'F', 's_hor_1', 's_hor_2', 's_piston'
    ]
    # channels passed to the cosine fit
    self.columns_analyse = [
        'F', 's_hor_sum', 's_hor_1', 's_hor_2', 's_piston'
    ]
    self.round_values = [('T', 3), ('sigma', 3)]
    # Header names after standardization; check if exists
    self.val_header_names = ['speciment_height', 'speciment_diameter']
    self.number_of_load_cycles_for_analysis = 5
    #Dummy Data, replace in Machine Config
    self.meta_names_of_parameter = {
        'sigma': ['Max. Spannung']
    } #list of names
    #Dummy Data, replace in Machine Config
    self.data_column_names = {
        'time': ['Time Series'],
        'F': ['Load Series'],
        's_hor_1': ['LVDT1 Series'],
        's_hor_2': ['LVDT2 Series'],
    }
def _sel_df(self, df, num=5, shift=-1):
    """Select the evaluation window of load cycles from one test block.

    Uses the cycle window prescribed per load frequency (TP Asphalt 25),
    falls back to the last *num* cycles when the file ends before that
    window, and returns ``None`` when no valid selection is possible.

    :param df: DataFrame of one (T, sigma, f) block with cycle column 'N'
    :param num: number of load cycles to select
    :param shift: cycle offset for the fallback selection
    :return: the selected DataFrame, or ``None``
    """
    N = df['N'].unique()
    n_N = len(N)
    max_N = max(N)
    min_N = min(N)
    freq = float(df['f'].unique()[0])
    # prescribed cycle window (from, to) per test frequency in Hz
    windows = {
        10.0: (98, 103),
        5.0: (93, 97),
        3.0: (43, 47),
        1.0: (13, 17),
        0.3: (8, 12),
        0.1: (3, 7),
    }
    Nfrom, Nto = windows.get(freq, (None, None))
    self._logger.debug(f'{min_N}, {max_N}, {n_N}, {num}, {shift}')
    self._logger.debug(f'Frequenz: {freq}, Nfrom: {Nfrom}, Nto: {Nto}')
    # case 1: fewer cycles than requested -> nothing to select
    if n_N < num:
        df_sel = None
    elif n_N == num:
        df_sel = df
    # fix: an unknown frequency left Nfrom/Nto as None and the numeric
    # comparisons below raised TypeError; treat it as "selection unknown"
    elif Nfrom is None:
        df_sel = None
    # case 2: file ends before the prescribed window -> take last cycles
    elif (max_N < Nto) & (n_N > num):
        df_sel = df[(df['N'] >= N[-num + shift])
                    & (df['N'] <= N[-1 + shift])]
    # case 3: prescribed window is fully contained in the data
    elif (Nfrom >= min_N) & (Nto < max_N):
        df_sel = df[(df['N'] >= Nfrom) & (df['N'] <= Nto)]
    # case 4: selection unknown
    else:
        df_sel = None
    return df_sel
def _fit_select_data(self):
    """
    select N load cycles from original data
    (a): Based on window of TP-Asphalt
    (b) last N cycles
    """
    self._logger.debug('run _fit_select_data')
    # remember the last recorded cycle of every chunk (used as N_tot later)
    self.max_N_in_data = []
    if not isinstance(self.data, list):
        # single chunk
        # NOTE(review): max_N_in_data is only filled when a selection is
        # made (num > 1); the else branch leaves it empty — confirm _calc
        # never indexes it in that configuration
        if self.number_of_load_cycles_for_analysis > 1:
            self.max_N_in_data.append(self.data['N'].max())
            df_sel = [
                self._sel_df(self.data,
                             num=self.number_of_load_cycles_for_analysis)
            ]
        else:
            df_sel = [self.data]
    else:
        # one selection per chunk; entries may be None (see _sel_df)
        df_sel = []
        for d in self.data:
            self.max_N_in_data.append(d['N'].max())
            if self.number_of_load_cycles_for_analysis > 1:
                d_sel = self._sel_df(
                    d, num=self.number_of_load_cycles_for_analysis)
            else:
                d_sel = d
            df_sel.append(d_sel)
    # replace data
    self.data = df_sel
def _calc(self):
    """Cosine-fit every analysed channel of every selected chunk and derive
    stiffness, elastic strain and phase lag per TP Asphalt 26.

    Results are collected in ``self.fit`` (DataFrame indexed by chunk).
    NOTE(review): largely duplicates ``paveit.functions.citt.calc_E`` —
    consider delegating.
    """
    self._logger.info('run _calc CITT')
    print('run CITT')
    self.fit = []
    for idx_data, data in enumerate(self.data):
        # skip chunks _sel_df could not select or that are too short to fit
        if data is None: continue
        if len(data) < 10: continue
        try:
            self._logger.debug(f'run fit on subset {idx_data}')
            # time axis relative to the chunk start
            data.index = data.index - data.index[0]
            res_temp = {}
            res_temp['idx'] = idx_data
            x = data.index.values
            freq = np.round(float(data['f'].unique()), 2)
            sigma = float(data['sigma'].unique())
            temperature = float(data['T'].unique())
            for idxcol, col in enumerate(self.columns_analyse):
                if not col in data.columns: continue
                y = data[col].values
                # sinusoidal regression of the whole channel
                res = fit_cos(x, y, freq=freq)
                for key, value in res.items():
                    res_temp[f'fit_{col}_{key}'] = value
                # analyse cycle data: min/max/mean/peak-to-peak per cycle N
                cycle_min = []
                cycle_max = []
                cycle_mean = []
                cycle_diff = []
                for N, data_cycle in data.groupby('N'):
                    y = data_cycle[col].values
                    cycle_min.append(y.min())
                    cycle_max.append(y.max())
                    cycle_mean.append(y.mean())
                    cycle_diff.append(cycle_max[-1] - cycle_min[-1])
                # aggregate per-cycle statistics: mean, std, relative spread
                res_temp[f'fit_{col}_cycle_min'] = cycle_min
                res_temp[f'fit_{col}_min'] = np.mean(cycle_min)
                res_temp[f'fit_{col}_min_std'] = np.std(cycle_min)
                res_temp[f'fit_{col}_min_diff_rel'] = (np.max(cycle_min) - np.min(cycle_min))/np.mean(cycle_min)
                res_temp[f'fit_{col}_cycle_max'] = cycle_max
                res_temp[f'fit_{col}_max'] = np.mean(cycle_max)
                res_temp[f'fit_{col}_max_std'] = np.std(cycle_max)
                res_temp[f'fit_{col}_max_diff_rel'] = (np.max(cycle_max) - np.min(cycle_max))/np.mean(cycle_max)
                res_temp[f'fit_{col}_cycle_mean'] = cycle_mean
                res_temp[f'fit_{col}_mean'] = np.mean(cycle_mean)
                res_temp[f'fit_{col}_mean_std'] = np.std(cycle_mean)
                res_temp[f'fit_{col}_mean_diff_rel'] = (np.max(cycle_mean) - np.min(cycle_mean))/np.mean(cycle_mean)
                res_temp[f'fit_{col}_cycle_diff'] = cycle_diff
                res_temp[f'fit_{col}_diff'] = np.mean(cycle_diff)
                res_temp[f'fit_{col}_diff_std'] = np.std(cycle_diff)
                res_temp[f'fit_{col}_diff_diff_rel'] = (np.max(cycle_diff) - np.min(cycle_diff))/np.mean(cycle_diff)
            # add more metadata
            res_temp['f_set'] = freq
            res_temp['sigma_set'] = sigma
            res_temp['T_set'] = temperature
            res_temp['N_from'] = data['N'].min()
            res_temp['N_to'] = data['N'].max()
            res_temp['N_tot'] = self.max_N_in_data[idx_data]
            res_temp['n_samples_per_cycle'] = int(
                len(data) / (res_temp['N_to'] - res_temp['N_from'] + 1))
            ## Stiffness
            deltaF = res_temp['fit_F_amp']
            deltaU = res_temp['fit_s_hor_sum_amp']
            h = float(self.metadata['speciment_height'])
            d = float(self.metadata['speciment_diameter'])
            nu = calc_nu(temperature)
            res_temp['nu'] = nu
            print(deltaF, deltaU, h, d, nu, np.pi)
            # per TP Asphalt 26
            res_temp['stiffness'] = deltaF /(h * deltaU) * (4.0/np.pi -1 + nu)
            ## elastic horizontal strain
            res_temp['el_strains'] = 2*2*deltaU/d * (1+3*nu)/(4 + np.pi*nu - np.pi) * 1000.0 # factor 2*2 because deltaU is the amplitude only, not the full deflection
            # TODO: rework and extend (ISSUE #2)
            res_temp['phase'] = res_temp['fit_F_phase'] - res_temp['fit_s_hor_sum_phase']
        except Exception as e:
            # NOTE(review): if the exception fires before ``res_temp = {}``
            # on the first chunk, the debug call below hits an unbound name
            self._logger.exception(e)
            res_temp = None
        self._logger.debug(res_temp)
        self.fit.append(res_temp)
    self.fit = pd.DataFrame.from_records(self.fit)
    self.fit = self.fit.reset_index(drop=True).set_index('idx')
    #self.fit = self.fit.set_index(['T', 'f', 'sigma'])
    nsamples = len(self.fit)
    self._logger.info(f'fitting finished, add {nsamples} samples')
    self._logger.debug(self.fit['stiffness'])
def save(self,
         task_id: ObjectId,
         meta: dict = None
         ):
    """
    Save the fit results and the corresponding raw data to MongoDB.

    For every fitted load step one ``CITTSiffnessResults`` document and
    one linked ``CITTSiffness`` raw-data document are written.

    Parameters
    ----------
    task_id : ObjectId
        Id of the processing task the results belong to.
    meta : dict, optional
        Additional metadata stored with every result document.  The dict
        is copied, so the caller's object is never modified (also avoids
        the mutable-default-argument pitfall of the old ``meta={}``).

    Raises
    ------
    RuntimeError
        If the analysis has not been run yet (no ``fit`` attribute).
    """
    if not hasattr(self, 'fit'):
        raise RuntimeError('no fit results available, run the analysis first')
    # base metadata shared by all documents; identical for every row of
    # self.fit, so it is built once outside the loop
    meta = dict(meta) if meta else {}
    meta['filehash'] = self.filehash
    meta['task_id'] = task_id
    if self.metadata['speciment_name'] is not None:
        meta['speciment_name'] = self.metadata['speciment_name']
    else:
        # fall back to the source file name if no speciment name was parsed
        meta['speciment_name'] = self.filename
    meta['speciment_diameter'] = self.metadata['speciment_diameter']
    meta['speciment_height'] = self.metadata['speciment_height']
    for idx_fit, fit in self.fit.iterrows():
        data = self.data[idx_fit]
        # write fit results
        data_dict = fit.to_dict()
        data_dict.update(meta)
        # strip the 'fit_' prefix from the result keys
        for key in list(data_dict.keys()):
            if key.startswith('fit_'):
                data_dict[key[4:]] = data_dict.pop(key)
        f = CITTSiffnessResults(**data_dict).save()
        # required raw-data columns
        data_out = dict(
            time=data.index,
            F=list(data['F']),
            N=list(data['N']),
            s_hor_1=list(data['s_hor_1']),
            s_hor_2=list(data['s_hor_2']),
            s_hor_sum=list(data['s_hor_sum']),
        )
        self._logger.debug(f'columns data, {data.columns}')
        # add optional columns when present in the measurement
        for col in ['s_piston']:
            if col in data.columns:
                self._logger.debug(f'add {col} to output data')
                data_out[col] = list(data[col])
        outkeys = list(data_out.keys())
        self._logger.debug(f'write raw data to db, {outkeys}')
        # raw data references the result document it belongs to
        CITTSiffness(result=f.id, **data_out).save()
class TP25A1_TUDresdenWille(TP25A1base):
    """
    Reader for CITT exports of the TU Dresden / Wille geosys test rig.

    Maps the German parameter and column names of the export file onto
    the internal names used by :class:`TP25A1base` and defines the unit
    conversions of the raw data (mm, N, s).
    """

    def _define_units(self):
        # conversion factors from the raw file units to internal units
        self.unit_s = 1 / 1000.  #mm
        self.unit_F = 1.0  #N
        self.unit_t = 1.  #s

    def update_parameter(self):
        # internal parameter name -> list of possible (German) names in
        # the metadata section of the export file
        self.meta_names_of_parameter = {
            'sigma': ['Oberspannung'],
            'sigma_min': ['Unterspannung'],
            'f': ['Frequenz'],
            'T': ['Versuchstemperatur'],
            't': ['Zeit'],
            't_pulse': ['Impulsdauer'],
            't_break': ['Lastpauseit'],
            'speciment_diameter': ['PK-Durchmesser'],
            'speciment_height': ['PK-Höhe'],
            'punch_diameter': ['Stempeldurchmesser'],
            'speciment_name': ['Probekörperbezeichnung'],
        }  #list of names
        # internal column name -> list of possible (German) column headers
        self.data_column_names = {
            'time': ['Zeit'],
            'F': ['Kraft'],
            'N': ['Zyklenzähler'],
            's_vert_1': ['Vertikalweg 1'],
            's_vert_2': ['Vertikalweg 2'],
            's_vert_3': ['Vertikalweg 3'],
            's_piston': ['Kolbenweg'],
        }

    def _process_data(self):
        # geosys block '015' holds the measurement data, blocks
        # 001/003/005 hold the test metadata
        meta, data = read_geosys(self.data, '015', metadata_ids=['001', '003', '005'])
        # define in class
        self.data = data.reset_index()
        self.metadata.update(meta)
        # log infos (debug print removed; head() already logged below)
        self._logger.debug(f'metadata: {self.metadata}')
        self._logger.debug(f'data: {self.data.head()}')

918
src/paveit/labtest/sheartest.py Normal file → Executable file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,4 @@
from .citt import citt
__all__ = ['citt',
]

View File

@@ -0,0 +1,135 @@
import datetime
import lmfit as lm
import numpy as np
import pandas as pd
import scipy.special as sf
from bson import ObjectId
from scipy.optimize import curve_fit
from paveit.datamodels import CITTSiffnessResults, RegCITT
def temp_freq_equivalence(T, f, phi, T0=20.0):
    """
    Temperature-frequency equivalence (time-temperature superposition).

    Shifts the loading frequency ``f`` measured at temperature ``T`` [°C]
    to the reference temperature ``T0`` with an Arrhenius-type shift
    factor and returns the log10 of the reduced frequency.
    """
    kelvin = 273.15
    shift_factor = np.exp(phi * (1.0 / (T + kelvin) - 1.0 / (T0 + kelvin)))
    reduced_frequency = f * shift_factor
    return np.log(reduced_frequency) / np.log(10)
def stiffness_tp26(T, f, phi, Emax, Emin, z0, z1, T0=20.0):
    """
    Stiffness master-curve model according to TP Asphalt part 26.

    A sigmoid between the limiting moduli ``Emin`` and ``Emax`` over the
    reduced (log10) frequency obtained from ``temp_freq_equivalence``.
    """
    x = temp_freq_equivalence(T, f, phi, T0)
    sigmoid = 1.0 / (1.0 + np.exp(z1 * x + z0))
    return Emin + (Emax - Emin) * sigmoid
def calc_nu(T):
    """
    Poisson's ratio of asphalt as a function of temperature ``T`` [°C].

    Empirical sigmoid evaluated on the temperature in Fahrenheit.
    """
    #TODO: Check whether the formula is correct!
    t_fahrenheit = 9 / 5 * T + 32
    return 0.15 + 0.35 / (1 + np.exp(3.1849 - 0.04233 * t_fahrenheit))
def citt(task_id: str):
    """
    Postprocessing task: fit the TP Asphalt 26 stiffness master curve.

    Loads all CITT stiffness results of the given task from the
    database, fits the sigmoid master curve over temperature and
    frequency and upserts the regression parameters into ``RegCITT``.

    Parameters
    ----------
    task_id : str
        Hex string id of the processing task.

    Returns
    -------
    bool
        ``True`` when the task finished (also when the regression failed
        and the initial parameter values were stored instead).
    """
    print('postprocessing')
    task_id = ObjectId(task_id)
    # read the relevant fields of all results belonging to this task
    parlist = ['f_set', 'T_set', 'stiffness', 'phase']
    data = pd.DataFrame.from_records([
        {k: obj[k] for k in parlist}
        for obj in CITTSiffnessResults.objects(task_id=task_id).only(*parlist)
    ])
    # initial guess for Emax: extrapolate stiffness linearly to phase angle 0
    line_mod = lm.models.LinearModel()
    out = line_mod.fit(data.stiffness, x=data.phase)
    Emax = line_mod.eval(out.params, x=0.0)
    Emin = 0
    # explicit check instead of `assert` (asserts vanish under python -O)
    if not Emin < Emax:
        raise ValueError(f'implausible Emax from extrapolation: {Emax}')
    # model with parameter bounds around the initial guesses
    mod = lm.models.Model(stiffness_tp26, independent_vars=['f', 'T'])
    mod.set_param_hint(
        'Emin',
        value=Emin,
        min=0,
        max=0.9 * Emax,
        vary=True,
    )
    mod.set_param_hint(
        'Emax',
        value=Emax,
        min=0.9 * Emax,
        max=1.1 * Emax,
        vary=True,
    )
    mod.set_param_hint(
        'T0',
        value=20.0,
        vary=False,  # reference temperature is fixed
    )
    mod.set_param_hint('phi', value=25000, min=15000, max=35000, vary=True)
    mod.set_param_hint('z0', value=1, min=1e-10, max=1000., vary=True)
    mod.set_param_hint('z1', value=-1, min=-1000., max=-1e-10, vary=True)
    ## run the fit with several optimizers and keep the best result (max r2)
    results = []
    r2 = []
    try:
        for method in ('leastsq', 'powell'):
            result = mod.fit(data.stiffness, T=data.T_set, f=data.f_set,
                             method=method, verbose=False)
            r2.append(1.0 - result.redchi / np.var(data.stiffness.values, ddof=2))
            results.append(result)
        best = np.nanargmax(r2)
        res = results[best].best_values
        res['stat_r2'] = r2[best]
    except Exception as e:
        # regression failed: fall back to the initial parameter values.
        # Note: lmfit Model has no valuesdict(); it lives on Parameters.
        print(f'error regression, send default values ({e})')
        res = mod.make_params().valuesdict()
    # add metadata
    res['nsamples'] = len(data)
    res['task_id'] = task_id
    res['date'] = datetime.datetime.now()
    print(res)
    # save (upsert) regression results to db
    RegCITT.objects(task_id=task_id).modify(upsert=True, **res)
    return True

0
tests/__init__.py Executable file
View File

0
tests/analysis/__init__.py Executable file
View File

52
tests/analysis/citt_test.py Executable file
View File

@@ -0,0 +1,52 @@
import logging
import os
import toml
from src.paveit.helper import read_file_to_bytesio
from src.paveit.labtest.citt import CITT_PTMDortmund
logger = logging.getLogger(__name__)
def test_base_class():
    """Placeholder: the CITT base class has no standalone tests yet."""
def test_citt_ptmdortmund():
    """
    Regression test for the PTM Dortmund CITT reader.

    Runs the full analysis on every sample file listed in ``meta.toml``
    and checks the fit quality (r2) of all signals as well as the
    10 Hz / 20 °C stiffness against the stored reference values.
    """
    data_path = 'tests/data/citt/PTM_Dortmund'
    res_dict = toml.load(os.path.join(data_path, 'meta.toml'))
    logger.info(res_dict)
    for filename, meta in res_dict.items():
        # fix: log message contained a placeholder instead of the filename
        logger.info(f'run test on: {filename}, {meta}')
        file = os.path.join(data_path, filename)
        buf = read_file_to_bytesio(file)
        metadata = {'org': 'pytest_ptm_dortmund'}
        res = CITT_PTMDortmund(filename, metadata, archive=False,
                               data=buf)
        res.run()
        fit = res.fit.reset_index()
        logger.info(fit.head())
        # one fitted row per load step
        assert len(fit) == 5
        # fit quality of force and displacement signals
        # (use `meta` directly; the former `m = res_dict[filename]` was
        # just a redundant second lookup of the same dict)
        for col in ['F', 's_hor_sum', 's_hor_1', 's_hor_2']:
            assert all(fit[f'fit_{col}_r2'] >= meta['min_r2'])
        # stiffness at 10 Hz / 20 °C within the allowed relative tolerance
        sel = fit[(fit['f'] == 10.0) & (fit['T'] == 20.0)].iloc[0]
        Emin = (1 - meta['max_diff']) * meta['stiffness_10Hz']
        Emax = (1 + meta['max_diff']) * meta['stiffness_10Hz']
        assert Emin <= sel['E'] <= Emax

116
tests/analysis/sine_test.py Executable file
View File

@@ -0,0 +1,116 @@
from random import uniform
import numpy as np
from paveit.analysis.regression import fit_cos, fit_cos_eval
def fit(freq: float = 10,
        ampl: float = 100.0,
        offset: float = 20.0,
        slope: float = 0.1,
        phase: float = 0.05,
        error: float = 0.001) -> None:
    """
    Build a clean cosine signal with linear drift, fit it with
    ``fit_cos`` and assert that every recovered parameter lies within
    the relative tolerance ``error``.
    """
    n_cycles: int = 5
    samples_per_cycle: int = 50
    t = np.linspace(0, n_cycles / freq, n_cycles * samples_per_cycle)
    y = ampl * np.cos(2 * np.pi * freq * t + phase) + slope * t + offset
    r = fit_cos(t, y)
    lo = 1 - error
    hi = 1 + error
    # each fitted parameter must match its input within the tolerance
    for key, expected in (('amp', ampl),
                          ('offset', offset),
                          ('slope', slope),
                          ('phase', phase),
                          ('freq', freq)):
        assert lo <= r[key] / expected <= hi
def test_fit_simple_sine(ntest: int = 50) -> None:
    """
    fit a simple sine signal and evaluate amplitude
    error: percentage error of ampl, Error max 0.1 %
    """
    # default parameters first, then ntest randomized signals
    fit()
    for _ in range(ntest):
        fit(ampl=uniform(1e-3, 1000),
            offset=uniform(1e-3, 1),
            slope=uniform(1e-5, 1),
            phase=uniform(1e-5, 1))
def fit_noise(freq: float = 10,
              ampl: float = 100.0,
              offset: float = 20.0,
              slope: float = 0.1,
              phase: float = 0.05,
              noise_level: float = 0.01,
              error: float = 0.01) -> None:
    """
    Build a noisy cosine signal with linear drift, fit it with
    ``fit_cos`` and assert that amplitude and frequency are recovered
    within the relative tolerance ``error``.
    """
    n_cycles: int = 5
    samples_per_cycle: int = 50
    t = np.linspace(0, n_cycles / freq, n_cycles * samples_per_cycle)
    clean = ampl * np.cos(2 * np.pi * freq * t + phase) + slope * t + offset
    y = clean + np.random.normal(0, noise_level * ampl, len(t))
    r = fit_cos(t, y)
    lo = 1 - error
    hi = 1 + error
    # only amplitude and frequency are checked; offset/slope/phase are
    # too sensitive to the added noise
    for key, expected in (('amp', ampl), ('freq', freq)):
        assert lo <= r[key] / expected <= hi
def test_fit_simple_sine_with_noise(ntest: int = 50) -> None:
    """
    fit a simple sine signal and evaluate amplitude
    error: percentage error of ampl, Error max 0.1 %
    """
    # default parameters first, then ntest randomized noisy signals
    fit_noise()
    for _ in range(ntest):
        fit_noise(ampl=uniform(1e-3, 1000),
                  offset=uniform(1e-3, 1),
                  slope=uniform(1e-5, 1),
                  phase=uniform(1e-5, 1),
                  noise_level=uniform(0.01, 0.1),
                  error=0.02)

View File

@@ -0,0 +1,14 @@
["sample_01.xlsm"]
min_r2 = 0.993
max_diff = 0.005 #%
stiffness_10Hz = 2269.0 #MPa
["sample_02.xlsm"]
min_r2 = 0.993
max_diff = 0.005 #%
stiffness_10Hz = 2250.0 #MPa
["sample_03.xlsm"]
min_r2 = 0.993
max_diff = 0.005 #%
stiffness_10Hz = 2231.0 #MPa

Binary file not shown.

Binary file not shown.

Binary file not shown.

0
tests/helper/__init__.py Executable file
View File

View File

@@ -0,0 +1,24 @@
import glob
import logging
import os
from src.paveit.helper import read_file_to_bytesio
logger = logging.getLogger(__name__)
data_path = 'tests/data/citt/PTM_Dortmund'
def test_read_file_compare_filesize():
    """
    Reading a file into a BytesIO buffer must preserve its exact size.
    """
    pattern = os.path.join(data_path, '*.xlsm')
    for file in glob.glob(pattern):
        expected_size = os.path.getsize(file)
        buf = read_file_to_bytesio(file)
        assert buf.getbuffer().nbytes == expected_size