Created base model for CITT, added PTM Dortmund, added tests

This commit is contained in:
Markus Clauß
2023-02-28 13:56:11 +01:00
parent b248a7e9b1
commit e861dbf10e
17 changed files with 917 additions and 103 deletions

.gitignore (vendored, new file, 180 lines added)

@@ -0,0 +1,180 @@
temp
.DS_Store
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# ---> VisualStudioCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix

debug.csv (new file, 4 lines added)

@@ -0,0 +1,4 @@
,fit_F_amp,fit_F_freq,fit_F_phase,fit_F_offset,fit_F_slope,fit_F_r2,fit_F_max,fit_F_min,f,sigma,fit_s_hor_sum_amp,fit_s_hor_sum_freq,fit_s_hor_sum_phase,fit_s_hor_sum_offset,fit_s_hor_sum_slope,fit_s_hor_sum_r2,fit_s_hor_sum_max,fit_s_hor_sum_min,fit_s_hor_1_amp,fit_s_hor_1_freq,fit_s_hor_1_phase,fit_s_hor_1_offset,fit_s_hor_1_slope,fit_s_hor_1_r2,fit_s_hor_1_max,fit_s_hor_1_min,fit_s_hor_2_amp,fit_s_hor_2_freq,fit_s_hor_2_phase,fit_s_hor_2_offset,fit_s_hor_2_slope,fit_s_hor_2_r2,fit_s_hor_2_max,fit_s_hor_2_min,nu,E
0,1162.037522728264,0.09999816445250176,3.2731742438169205,1657.4959341169797,0.022890975975805593,0.9999709812370754,2822.8786686693848,498.4860405788809,0.1,0.2,0.004904662057765795,0.09994473426198426,3.274570732678786,0.004472897149678457,3.4796345898322193e-06,0.9995438125784065,0.009632119781608398,-0.00042915385165576136,0.0022048443407161134,0.0999473113711256,3.2789165848392394,0.002036487114427019,1.317283541472095e-06,0.9992245191638016,0.0043773692868893654,-0.00022888205421645047,0.0026998634649033275,0.0999425971739857,3.271026693390654,0.00243640933189622,2.1623427295265008e-06,0.9993713553565571,0.005254750494719032,-0.0002479555587344695,0.2983926664681502,2260.236445571626
1,1163.9861551163267,0.29999672326752724,3.271466866301432,1657.5773060905333,0.023592068619978698,0.999977491807627,2827.1702071859427,492.85935674606014,0.30003,0.2,0.004904630239776472,0.30002953724325576,3.261420279897325,0.004476978416102744,2.2128929628375675e-05,0.9997651921759285,0.009765634313234614,-0.0004482273561737665,0.0021960586065051407,0.300085988714776,3.2617587973425652,0.0020390391186955238,8.035203621628222e-06,0.9992996273163816,0.004420284672054908,-0.0002098085496983204,0.0027085993503841803,0.29998369085814713,3.2611491963027257,0.002437939646841411,1.4093566880537998e-05,0.9995179610005985,0.005354886393438715,-0.0002384188064754461,0.2983926664681502,2264.0413462626584
2,1173.2940951101361,3.0019781539143713,3.1127799064755783,1652.6775323274487,2.2793532011736803,0.9997118511163391,2828.2192499344346,494.76670719786375,3.003,0.2,0.004927618845400971,3.0012837674744888,3.1051127487990566,0.004715737141843021,-1.2305236334063097e-05,0.998488708969846,0.009899148844860886,-0.0004005435948787328,0.0022065238872148044,3.0014146858816817,3.110359353742398,0.0021183309358349563,-8.842607057128579e-06,0.9965020191798836,0.004558567579810768,-0.00018119829292129186,0.002721172122260612,3.0011630113467382,3.100932209486545,0.00259739494570079,-3.4648940648246214e-06,0.9979287207765057,0.0054359487876403795,-0.000257492310993479,0.2983926664681502,2271.499199111919
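Each row of debug.csv holds the sine-fit results for one test frequency: amplitude, frequency, phase, offset, slope, R² and extrema per channel (F, s_hor_sum, s_hor_1, s_hor_2), plus the derived Poisson ratio nu and stiffness E. A minimal pandas sketch for sanity-checking such a file; the 0.99 R² threshold is an illustrative value, not one taken from this commit:

import pandas as pd

# load the fit results written during a debug run
df = pd.read_csv('debug.csv', index_col=0)

# every fitted channel should track the sinusoidal signal closely
r2_cols = [c for c in df.columns if c.endswith('_r2')]
assert (df[r2_cols] >= 0.99).all().all()

# in this file, stiffness E grows slightly with load frequency f
print(df[['f', 'E']])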

(file name not shown)

@@ -1,4 +1,5 @@
# main __init__.py
from .analysis import *
from .functions import *
from .helper import *
from .labtest import *

(file name not shown)

@@ -0,0 +1 @@
from .citt import *

(file name not shown)

@@ -0,0 +1,16 @@
import numpy as np


def stiffness_tp26(T, f, Emax, Emin, phi, z0, z1, T0=20.0):
    alphaT = np.exp(phi * ((1 / (T + 273.15)) - (1 / (T0 + 273.15))))
    x = np.log(f * alphaT) / np.log(10)
    E = Emin + (Emax - Emin) / (1 + np.exp(z0 * x + z1))
    return E


def calc_nu(T):
    #TODO: check whether this formula is correct!
    nu = 0.15 + (0.35) / (1 + np.exp(3.1849 - 0.04233 * (9 / 5 * T + 32)))
    return nu
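For context, stiffness_tp26 is a sigmoid master curve (presumably after TP Asphalt-StB, Part 26) over reduced frequency, and calc_nu estimates the temperature-dependent Poisson ratio. A quick sketch of how the two behave; the master-curve parameters below are invented for illustration, not calibrated values from this repository:

import numpy as np

# assumed, made-up master-curve parameters (MPa); z0 < 0 so that
# stiffness decreases with rising temperature
Emax, Emin = 30000.0, 500.0
phi, z0, z1 = 25000.0, -0.6, 0.5

for T in (-10.0, 20.0, 40.0):  # °C
    E = stiffness_tp26(T, f=10.0, Emax=Emax, Emin=Emin,
                       phi=phi, z0=z0, z1=z1)
    # stiffness falls and the Poisson ratio rises with temperature
    print(T, round(float(E), 1), round(float(calc_nu(T)), 3))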

(file name not shown)

@@ -1,6 +1,8 @@
from .filehandling import read_file_to_bytesio
from .filehasher import calc_hash_of_bytes
from .minio import get_minio_client_archive, get_minio_client_processing

__all__ = ['read_file_to_bytesio',
           'get_minio_client_archive', 'get_minio_client_processing',
           'calc_hash_of_bytes'
           ]

(file name not shown)

@@ -0,0 +1,12 @@
import logging
from io import BytesIO

logger = logging.getLogger(__name__)


def read_file_to_bytesio(filename: str):
    with open(filename, "rb") as fh:
        buf = BytesIO(fh.read())
    return buf

(file name not shown)

@@ -1,11 +1,13 @@
# coding: utf-8
import io
import logging

import numpy as np
import pandas as pd

from paveit.analysis import fit_cos
from paveit.functions import calc_nu
from paveit.helper import calc_hash_of_bytes, get_minio_client_processing


class DataSineLoad():
    """
@@ -13,14 +15,64 @@ class DataSineLoad():
""" """
def __init__(self, filename:str , metadata: dict): def __init__(self,
filename: str,
metadata: dict,
archive: bool = True,
debug: bool = False,
data: None | io.BytesIO = None):
self.filename = filename self.filename = filename
self.metadata = metadata self.metadata = metadata
self._logger = logger if isinstance(data, io.BytesIO):
self.data = data
self._logger.info(f'filename s3: {self.filename}, metadata: {self.metadata}') self.archive_data = archive
self.debug = debug
self._logger = logging.getLogger(__name__)
self._logger.info(
f'filename s3: {self.filename}, metadata: {self.metadata}')
self._pre_run()
def _set_parameter(self):
self.split_data_based_on_parameter = ['T', 'sigma', 'f']
self.col_as_int = ['N']
self.col_as_float = ['T', 'F', 's_piston', 's_hor_1', 'f', 's_hor_1', 's_hor_2']
self.val_col_names = ['time', 'T', 'f', 'sigma', 'N', 'F', 's_hor_1', 's_hor_2']
self.columns_analyse = ['F','s_hor_sum','s_hor_1','s_hor_2','s_piston']
# Header names after standardization; check if exists
self.val_header_names = ['speciment_height', 'speciment_diameter']
self.number_of_load_cycles_for_analysis = 5
self.meta_names_of_parameter = {
'sigma': ['Max. Spannung']
} #list of names
self.data_column_names = {
'time': ['Time Series'],
'F': ['Load Series'],
's_hor_1': ['LVDT1 Series'],
's_hor_2': ['LVDT2 Series'],
}
def update_parameter():
""" update standard prameter from function self._set_parameter()"""
pass
def _define_units(self):
self.unit_s = 1 #mm
self.unit_F = 1 #N
self.unit_t = 1 / 1000. #s
def _connect_to_s3(self): def _connect_to_s3(self):
self._logger.info('connect to db') self._logger.info('connect to db')
@@ -30,16 +82,15 @@ class DataSineLoad():
    def _read_from_s3_to_bytesio(self):
        self._logger.info('read bytes')

        try:
            self._connect_to_s3()
            response = self.__minioClient.get_object('processing',
                                                     self.filename)
            self.data = response.data
        finally:
            response.close()
            response.release_conn()

        self.data = io.BytesIO(self.data)

    def _calc_hash_of_bytesio(self):
@@ -48,43 +99,306 @@ class DataSineLoad():
        self.data.seek(0)
        self._logger.debug(f'Hash of file: {self.filehash}')

    def _process_data(self):
        """ convert self.data (BytesIO) to pandas.DataFrame, update
        self.metadata with information from the file """
        self._logger.debug('convert bytes to pandas.DataFrame')

        encoding = 'utf-8'
        self.data = pd.read_csv(self.data, encoding=encoding)

    def _standardize_data(self):
        colnames = list(self.data.columns)
        for par, names in self.data_column_names.items():
            for name in names:
                colnames = [sub.replace(name, par) for sub in colnames]
        self.data.columns = colnames
        print(self.data.head(5))

    def _standardize_meta(self):
        for par, names in self.meta_names_of_parameter.items():
            for name in names:
                if name in self.metadata:
                    self.metadata[par] = self.metadata[name]
                    self.metadata.pop(name)
                    break

    def _validate_data(self):
        for name in self.val_col_names:
            if name not in self.data.columns:
                raise ValueError(f'missing required column: {name}')

    def _validate_meta(self):
        for name in self.val_header_names:
            if name not in self.metadata:
                raise KeyError(f'missing required metadata: {name}')

    def _post_apply_units(self):
        for col in ['s_hor_sum', 's_hor_1', 's_hor_2']:
            if col in self.data.columns:
                self.data[col] = self.data[col].mul(self.unit_s)

        for col in ['F']:
            self.data[col] = self.data[col].mul(self.unit_F)

        for col in ['time']:
            self.data[col] = self.data[col].mul(self.unit_t)

        return True

    def _post_select_important_columns(self):
        # TODO: add more columns, check datamodel
        self.data = self.data[self.val_col_names]

    def _post_calc_missing_values(self):
        cols = self.data.columns
        if 's_hor_sum' not in cols:
            self.data['s_hor_sum'] = self.data[['s_hor_1',
                                                's_hor_2']].sum(axis=1)

    def _post_opt_data(self):
        # set dtypes:
        for col in self.col_as_int:
            self.data[col] = self.data[col].astype('int')

        for col in self.col_as_float:
            try:
                self.data[col] = self.data[col].astype('float')
            except:
                pass

        # set index
        self.data = self.data.set_index('time')

        return True

    def _fit_split_data(self):
        data_gp = self.data.groupby(self.split_data_based_on_parameter)

        data_list = []
        for idx, d in data_gp:
            idx_diff = np.diff(d.index)
            dt_mean = idx_diff.mean()
            gaps = idx_diff > (4 * dt_mean)
            has_gaps = any(gaps)

            if not has_gaps:
                data_list.append(d)
            else:
                #FIX: gap finding not implemented yet
                data_list.append(d)
                """
                print('has gaps')
                print(gaps)
                idx_gaps = (np.where(gaps)[0] - 1)[0]
                print(idx_gaps)
                data_list.append(d.iloc[0:idx_gaps])
                """

        # a single group is unwrapped to a plain DataFrame
        if len(data_list) == 1:
            self.num_tests = 1
            self.data = data_list[0]
        else:
            self.num_tests = len(data_list)
            self.data = data_list

    def _fit_select_data(self):
        """
        select N load cycles from the original data:
        (a) based on the window defined by TP Asphalt, or
        (b) the last N cycles
        """

        def sel_df(df, num=5):
            N = df['N'].unique()
            freq = float(df['f'].unique()[0])

            # define cycles to select
            if freq == 10.0:
                Nfrom = 98
                Nto = 103
            elif freq == 5.0:
                Nfrom = 93
                Nto = 97
            elif freq == 3.0:
                Nfrom = 43
                Nto = 47
            elif freq == 1.0:
                Nfrom = 13
                Nto = 17
            elif freq == 0.3:
                Nfrom = 8
                Nto = 12
            elif freq == 0.1:
                Nfrom = 3
                Nto = 7
            else:
                Nfrom = None
                Nto = None

            # case 1: the file does not contain all load cycles,
            # take the last num cycles
            if (Nto is not None) and (max(N) < Nto) and (len(N) >= num):
                df_sel = df[(df['N'] >= N[-num]) & (df['N'] <= N[-1])]
            # case 2: select the frequency-specific window
            elif (Nfrom is not None) and (len(N) > Nto - Nfrom):
                df_sel = df[(df['N'] >= Nfrom) & (df['N'] <= Nto)]
            else:
                # fall back to the unfiltered data
                df_sel = df
            return df_sel

        if not isinstance(self.data, list):
            if self.number_of_load_cycles_for_analysis > 1:
                df_sel = [
                    sel_df(self.data,
                           num=self.number_of_load_cycles_for_analysis)
                ]
            else:
                df_sel = [self.data]
        else:
            df_sel = []
            for d in self.data:
                if self.number_of_load_cycles_for_analysis > 1:
                    d_sel = sel_df(
                        d, num=self.number_of_load_cycles_for_analysis)
                else:
                    d_sel = d
                df_sel.append(d_sel)

        # replace data
        self.data = df_sel

    def _calc(self):
        self._logger.debug('calc data')

        print(len(self.data))
        self.fit = []
        for idx_data, data in enumerate(self.data):
            if data is None:
                continue
            if len(data) < 10:
                continue

            data.index = data.index - data.index[0]

            res_temp = {}
            x = data.index.values

            freq = np.round(float(data['f'].unique()[0]), 2)
            sigma = float(data['sigma'].unique()[0])
            temperature = float(data['T'].unique()[0])

            for idxcol, col in enumerate(self.columns_analyse):
                if col not in data.columns:
                    continue

                y = data[col].values
                res = fit_cos(x, y, freq=freq)

                for key, value in res.items():
                    res_temp[f'fit_{col}_{key}'] = value

                res_temp[f'fit_{col}_max'] = max(y)
                res_temp[f'fit_{col}_min'] = min(y)

            res_temp['f'] = freq
            res_temp['sigma'] = sigma
            res_temp['T'] = temperature

            ## stiffness
            deltaF = res_temp['fit_F_amp']
            nu = calc_nu(temperature)
            res_temp['nu'] = nu
            h = float(self.metadata['speciment_height'])
            deltaU = res_temp['fit_s_hor_sum_amp']
            res_temp['E'] = (deltaF * (0.274 + nu)) / (h * deltaU)

            self.fit.append(res_temp)

        self.fit = pd.DataFrame.from_records(self.fit)
        self.fit = self.fit.set_index(['T', 'f', 'sigma'])
        print(self.fit)

    def _archive_binary_data(self):
        # requires the celery 'app' from the worker service; archiving is
        # currently disabled in run()
        self._logger.debug('send file to archive')
        app.send_task(
            'ArchiveFile',
            args=[self.filename, self.metadata, self.filehash, 'org', 'citt'],
            queue='archive')

    def _pre_run(self):
        if not hasattr(self, 'data'):
            self._read_from_s3_to_bytesio()
        self._calc_hash_of_bytesio()

        self._set_parameter()
        self.update_parameter()
        self._define_units()

    def run(self):
        self._logger.info('run task')

        self._process_data()
        self._standardize_data()
        self._standardize_meta()
        self._validate_data()
        self._validate_meta()
        self._post_select_important_columns()
        self._post_apply_units()
        self._post_calc_missing_values()
        self._post_opt_data()
        self._fit_split_data()
        self._fit_select_data()
        self._calc()
        #self._logger.debug(f'results: {res}')
        #if self.archive_data:
        #    self._archive_binary_data()
        #return res
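For reference, _calc() evaluates the indirect tensile stiffness modulus as E = deltaF * (0.274 + nu) / (h * deltaU). A worked check against the first row of debug.csv; the specimen height h = 60 mm is an assumption (it is not stored in debug.csv), chosen here because it reproduces the recorded E:

# values taken from debug.csv, row 0 (f = 0.1 Hz, sigma = 0.2)
deltaF = 1162.0375   # N,  fitted force amplitude (fit_F_amp)
deltaU = 0.0049047   # mm, fitted deformation amplitude (fit_s_hor_sum_amp)
nu = 0.29839         # -,  Poisson ratio at the test temperature
h = 60.0             # mm, assumed specimen height (not in debug.csv)

E = deltaF * (0.274 + nu) / (h * deltaU)
print(round(E, 1))   # -> 2260.2 MPa, matching column E in debug.csv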

(file name not shown)

@@ -5,6 +5,7 @@ from csv import reader
import numpy as np
import pandas as pd

from paveit.labtest import DataSineLoad


class CITTBase(DataSineLoad):
@@ -15,7 +16,7 @@ class CITT_KIT(DataSineLoad):
    def _calc(self):
        return (self.df.mean().mean(), self.df.max().max())

    def _process_data(self):
        logger.debug('convert bytes to pandas.DataFrame')
        self.data.seek(0)
@@ -93,20 +94,41 @@ class CITT_KIT(DataSineLoad):
        #res = res.sort_values(['f', 'ZEIT'])

        #define in class
        self.data = res.reset_index()
class CITT_PTMDortmund(DataSineLoad):

    def _define_units(self):
        self.unit_s = 1  # mm
        self.unit_F = 1000.  # N
        self.unit_t = 1.  # s

    def update_parameter(self):
        self.meta_names_of_parameter = {
            'sigma': ['Max. Spannung', 'Max Stress'],
            'f': ['Frequenz', 'Frequency'],
            'T': ['Versuchstemperatur', 'Target Test Temperature'],
            'Nfrom': ['Erster Aufzeichnungslastwechsel', 'Start Cycle'],
            'Nto': ['Letzer Aufzeichnungslastwechsel', 'Last Cycle'],
            't': ['Zeitfolgen', 'Time Series'],
            'speciment_diameter': ['Durchmesser (mm)', 'Diameter (mm)'],
            'speciment_height': ['Länge (mm)', 'Length (mm)'],
        }  # list of possible names per parameter

        self.data_column_names = {
            'time': ['Time Series'],
            'F': ['Load Series'],
            's_hor_1': ['LVDT1 Series'],
            's_hor_2': ['LVDT2 Series'],
        }

    def _process_data(self):
        res = []

        xl = pd.ExcelFile(self.data)
        num_sheets = len(xl.sheet_names)
        print(num_sheets)

        diameter = []
        height = []
@@ -124,52 +146,117 @@ class CITT_PTMDortmund(DataSineLoad):
            meta = pd.read_excel(self.data, sheetid,
                                 skiprows=1,
                                 nrows=80)
            meta = meta[meta.columns[[0, 2]]]
            meta = meta.set_index(meta.columns[0])
            meta = meta.dropna(axis=0)
            meta = meta[meta.columns[0]]
            meta = meta.to_dict()

            # remove whitespace in dict keys:
            meta = {x.strip(): v for x, v in meta.items()
                    if isinstance(x, str)}

            frequency_test = None

            # add metadata to the dataframe
            for par in ['sigma', 'f', 'T']:
                names = self.meta_names_of_parameter[par]
                v = None
                for name in names:
                    try:
                        v = np.round(float(meta[name]), 5)
                        if par == 'f':
                            v = np.round(v, 2)
                        break
                    except:
                        pass
                assert v is not None
                temp[par] = v
                if par == 'f':
                    frequency_test = v

            # read additional parameters
            names = self.meta_names_of_parameter['Nfrom']
            for name in names:
                try:
                    Nfrom = int(meta[name])
                    break
                except:
                    Nfrom = None
            assert Nfrom is not None

            names = self.meta_names_of_parameter['Nto']
            for name in names:
                try:
                    Nto = int(meta[name])
                    break
                except:
                    Nto = None
            assert Nto is not None

            # add the cycle number to the dataframe
            names = self.meta_names_of_parameter['t']
            for name in names:
                try:
                    time_idx = temp[name].values
                    break
                except:
                    time_idx = None
            assert time_idx is not None

            temp['N'] = 0

            self._logger.info(f'cycles from {Nfrom} to {Nto}')

            #BUG: the cycle range is defined incorrectly in the measurement
            #file and will be corrected by PTM; until then the cycle number
            #is derived from the time index instead:
            #for cycle in range(Nfrom, Nto+1):
            dt = 1.0 / frequency_test
            tmax = dt
            max_timeindex = max(time_idx)
            cycle = 0
            while tmax < max_timeindex:
                # time window
                tmin = (cycle) * dt
                tmax = (cycle + 1) * dt

                # filter data
                idx = temp[(time_idx >= tmin)
                           & (time_idx < tmax)].index

                #FIX: see bug above
                if any(idx >= 500):
                    idx = idx[idx < 500]

                # set cycle number
                temp.loc[idx, 'N'] = cycle
                cycle += 1

            # add diameter and height to the lists
            names = self.meta_names_of_parameter['speciment_diameter']
            for name in names:
                try:
                    v = float(meta[name])
                    break
                except:
                    v = None
            assert v is not None
            diameter.append(v)

            names = self.meta_names_of_parameter['speciment_height']
            for name in names:
                try:
                    v = float(meta[name])
                    break
                except:
                    v = None
            assert v is not None
            height.append(v)

            # append data to the final dataframe
            res.append(temp)
@@ -179,14 +266,17 @@ class CITT_PTMDortmund(DataSineLoad):
        # add data from the specimen to metadata
        #if not 'speciment_diameter' in self.metadata:
        #    self.metadata['speciment_diameter'] = np.mean(diameter)
        #if not 'speciment_height' in self.metadata:
        #    self.metadata['speciment_height'] = np.mean(height)

        #define in class
        self.data = res.reset_index()
        self.metadata.update(meta)

        # log infos
        self._logger.debug(self.metadata)
        self._logger.debug(self.data.head())
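The while loop above assigns a load-cycle number N by binning the time index into windows of one load period, because the cycle counters recorded in the measurement file are unreliable (see the BUG note). A standalone sketch of the same idea on synthetic data; the names and values here are illustrative only:

import numpy as np
import pandas as pd

f = 10.0                                   # Hz, test frequency
dt = 1.0 / f                               # s, duration of one load cycle
t = np.arange(0, 0.5, 0.002)               # 5 cycles sampled at 500 Hz
df = pd.DataFrame({'time': t})

# vectorized equivalent of the per-cycle while loop
df['N'] = (df['time'] // dt).astype(int)
assert df['N'].max() == 4                  # cycles 0..4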

tests/__init__.py (new file, empty)


(file name not shown)

@@ -0,0 +1,50 @@
import logging
import os

import toml

from src.paveit.helper import read_file_to_bytesio
from src.paveit.labtest.citt import CITT_PTMDortmund

logger = logging.getLogger(__name__)


def test_base_class():
    pass


def test_citt_ptmdortmund():
    data_path = 'tests/data/citt/PTM_Dortmund'
    res_dict = toml.load(os.path.join(data_path, 'meta.toml'))
    logger.info(res_dict)

    for filename, meta in res_dict.items():
        logger.info(f'run test on: {filename}, {meta}')

        file = os.path.join(data_path, filename)
        buf = read_file_to_bytesio(file)

        metadata = {'org': 'pytest_ptm_dortmund'}

        res = CITT_PTMDortmund(filename, metadata, archive=False,
                               data=buf)
        res.run()

        fit = res.fit.reset_index()
        assert len(fit) == 5

        m = res_dict[filename]
        for col in ['F', 's_hor_sum', 's_hor_1', 's_hor_2']:
            assert all(fit[f'fit_{col}_r2'] >= m['min_r2'])

        sel = fit[(fit['f'] == 10.0) & (fit['sigma'] == 0.2)
                  & (fit['T'] == 20.0)].iloc[0]
        Emin = (1 - m['max_diff']) * m['stiffness_10Hz']
        Emax = (1 + m['max_diff']) * m['stiffness_10Hz']
        assert Emin <= sel['E'] <= Emax

tests/analysis/sine_test.py (new file, 116 lines added)

@@ -0,0 +1,116 @@
from random import uniform

import numpy as np

from paveit.analysis.regression import fit_cos, fit_cos_eval


def fit(freq: float = 10,
        ampl: float = 100.0,
        offset: float = 20.0,
        slope: float = 0.1,
        phase: float = 0.05,
        error: float = 0.001) -> None:
    N: int = 5
    num_samples_per_cycle: int = 50

    t = np.linspace(0, N / freq, N * num_samples_per_cycle)
    y = ampl * np.cos(2 * np.pi * freq * t + phase) + slope * t + offset

    r = fit_cos(t, y)

    error_min = (1 - error)
    error_max = (1 + error)

    # amplitude
    rel_error = (r['amp'] / ampl)
    assert error_min <= rel_error <= error_max

    # offset
    rel_error = (r['offset'] / offset)
    assert error_min <= rel_error <= error_max

    # slope
    rel_error = (r['slope'] / slope)
    assert error_min <= rel_error <= error_max

    # phase
    rel_error = (r['phase'] / phase)
    assert error_min <= rel_error <= error_max

    # freq
    rel_error = (r['freq'] / freq)
    assert error_min <= rel_error <= error_max


def test_fit_simple_sine(ntest: int = 50) -> None:
    """
    fit a simple sine signal and check the recovered parameters;
    error: allowed relative error per parameter, at most 0.1 %
    """
    fit()

    # run multiple tests with random parameters
    for i in range(ntest):
        fit(
            ampl=uniform(1e-3, 1000),
            offset=uniform(1e-3, 1),
            slope=uniform(1e-5, 1),
            phase=uniform(1e-5, 1),
        )


def fit_noise(freq: float = 10,
              ampl: float = 100.0,
              offset: float = 20.0,
              slope: float = 0.1,
              phase: float = 0.05,
              noise_level: float = 0.01,
              error: float = 0.01) -> None:
    N: int = 5
    num_samples_per_cycle: int = 50

    t = np.linspace(0, N / freq, N * num_samples_per_cycle)
    y = ampl * np.cos(2 * np.pi * freq * t + phase) + slope * t + offset
    y_noise = np.random.normal(0, noise_level * ampl, len(t))
    y = y + y_noise

    r = fit_cos(t, y)

    error_min = (1 - error)
    error_max = (1 + error)

    # amplitude
    rel_error = (r['amp'] / ampl)
    assert error_min <= rel_error <= error_max

    # freq
    rel_error = (r['freq'] / freq)
    assert error_min <= rel_error <= error_max


def test_fit_simple_sine_with_noise(ntest: int = 50) -> None:
    """
    fit a noisy sine signal and check amplitude and frequency;
    error: allowed relative error, 1 % (2 % for the randomized runs)
    """
    fit_noise()

    # run multiple tests with random parameters
    for i in range(ntest):
        fit_noise(
            ampl=uniform(1e-3, 1000),
            offset=uniform(1e-3, 1),
            slope=uniform(1e-5, 1),
            phase=uniform(1e-5, 1),
            noise_level=uniform(0.01, 0.1),
            error=0.02,
        )
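fit_cos itself is not part of this commit; from the assertions above one can infer that it fits y = amp*cos(2*pi*freq*t + phase) + slope*t + offset and returns at least the keys amp, freq, phase, offset and slope (plus r2, which the CITT pipeline uses). A minimal reimplementation sketch with scipy, for illustration only, not the repository's code:

import numpy as np
from scipy.optimize import curve_fit

def _model(t, amp, freq, phase, offset, slope):
    return amp * np.cos(2 * np.pi * freq * t + phase) + slope * t + offset

def fit_cos_sketch(t, y, freq=10.0):
    # start values: half the peak-to-peak range, the nominal frequency,
    # zero phase and slope, the mean as offset
    p0 = [(y.max() - y.min()) / 2, freq, 0.0, y.mean(), 0.0]
    popt, _ = curve_fit(_model, t, y, p0=p0)
    res = dict(zip(['amp', 'freq', 'phase', 'offset', 'slope'], popt))

    # coefficient of determination of the fit
    y_pred = _model(t, *popt)
    res['r2'] = 1 - np.sum((y - y_pred)**2) / np.sum((y - y.mean())**2)
    return res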

(file name not shown)

@@ -0,0 +1,4 @@
["sample_01.xlsm"]
min_r2 = 0.993
max_diff = 0.005 # allowed relative deviation (0.5 %)
stiffness_10Hz = 2269.0 # MPa

Binary file not shown.

tests/helper/__init__.py (new file, empty)

(file name not shown)

@@ -0,0 +1,24 @@
import glob
import logging
import os

from src.paveit.helper import read_file_to_bytesio

logger = logging.getLogger(__name__)

data_path = 'tests/data/citt/PTM_Dortmund'


def test_read_file_compare_filesize():
    files = glob.glob(os.path.join(data_path, '*.xlsm'))

    for file in files:
        file_stat = os.stat(file)
        file_size = file_stat.st_size

        buf = read_file_to_bytesio(file)
        buf_size = buf.getbuffer().nbytes

        assert file_size == buf_size