Commit 9b169dac authored by Bognár, Á.'s avatar Bognár, Á.
Browse files

Merge branch 'develop-postporcess' into 'master'

Develop postporcess

See merge request !1
parents 4b8dc378 9943b2fe
......@@ -18,12 +18,17 @@ Daypym is a project to preprocess and run DAYSIM solar irradiance simulations wi
* Visualizing irradiance simulation results
# Tutorial
# Tutorials
## Generate sensor points and run DAYSIM simulations with Daypym
See example_workflow.ipynb jupyter notebook in the example_workflow folder.
To start it:
* clone or copy this repository to your computer
* open a terminal window and navigate to the example_workflow folder in the cloned repository
* type: jupyter notebook example_workflow.ipynb
Alternatively, you can open and run (from the same location) the file.
Alternatively, you can open and run (from the same location) the file.
\ No newline at end of file
## Use irradiance results for PV power simulations with [PVMismatch](
(You need to have [PVMismatch installed](
In this example we compare the PV power results calculated with a mean uniform irradiance:
* after doing the previous tutorial, open and run (from the same location) the file.
......@@ -5,6 +5,7 @@ from geomeppy import view_geometry
import pathlib
from matplotlib import pyplot as plt
import os
import json
import subprocess # This should be used instead of os.system. Fix it later.
from geomeppy.geom.polygons import (
......@@ -210,7 +211,7 @@ def create_sensor_points(surf_name, points_in_poly_2d, row_index, col_index, ori
return sensor_points_surf
def translate_to_ds_pts(surf_sensor_points, p_name):
"""Translates and saves the sensorpoints as Daysim .pts file. Also saves a csv with additional info useful for postprocessing"""
"""Translates and saves the sensorpoints as Daysim .pts file. Also saves a csv or json with additional info useful for postprocessing"""
x, y, z, vx, vy, vz, surf_name, ri, ci = ([] for i in list(range(9)))
for surf in surf_sensor_points:
for i in list(range(len(surf['sensor_points']))):
......@@ -229,7 +230,13 @@ def translate_to_ds_pts(surf_sensor_points, p_name):
for p in spdf.index:
ptsfile.write(str(spdf['x'][p]) +' '+ str(spdf['y'][p]) +' '+ str(spdf['z'][p]) +' '+ str(spdf['vx'][p]) +' '+ str(spdf['vy'][p]) +' '+ str(spdf['vz'][p]) + '\n')
# saving metadatata of sensor points in csv or json format
for s in surf_sensor_points:
spi = spdf[spdf['surf_name']==s['surf_name']].index.tolist()
s.update({'sp_index':[int(i) for i in spi]})
with open('pts/{}.json'.format(p_name), 'w') as fp:
json.dump(surf_sensor_points, fp, indent=4)
def translate_to_ds_pts_depr1(surf_sensor_points, outputpath_pts, outputpath_kts):
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import os
# we need the ill result, the pts and kts and merge this into some structure, that the corresponding surface name, the sensor point coords (maybe even reelative coords on the surface?) and the irrad value are in one object
# navigate to the project folder and run
def ill_to_df(p_name):
    """Read a Daysim .ill result file into a time-indexed pandas DataFrame.

    Reads 'pts\\<p_name>.csv' (sensor-point metadata) to build one column
    name per sensor point ('<surf_name>c<ci>r<ri>') and 'res\\<p_name>.ill'
    for the irradiance values. NOTE(review): the backslash relative paths
    are Windows-specific — confirm before running on other platforms.

    :param p_name: project name, i.e. the file stem shared by the
        pts .csv and res .ill files.
    :return: pandas DataFrame with one column per sensor point and a
        generated 'Timestamp' DatetimeIndex covering the year 2001.
    """
    pts = pd.read_csv(r'pts\{}.csv'.format(p_name))
    surfnames = list(pts['surf_name']) # read the surf names from the csv
    # make a list of unique names for the ptss. These will be the col names for the ill dataframe
    ptsnames = []
    for i in list(range(len(surfnames))):
        ptsnames.append(surfnames[i] + 'c{}r{}'.format(pts.loc[i, 'ci'], pts.loc[i, 'ri']))
    ill = pd.read_csv(r'res\{}.ill'.format(p_name), sep=' ', skipinitialspace=True, header=None, dtype=np.float64)
    # converting fraction hours seems complicated because of rounding errors, so for now, lazily we generate our own timestamps from the first two rows
    # (column 2 presumably holds fractional hours: the difference of the first two rows, times 60, is the timestep in minutes — verify against the Daysim output format)
    timestepmins = pd.Timedelta(minutes=int(round((ill[2][1] - ill[2][0]) * 60)))
    startdate = pd.Timestamp('2001-01-01 00:00:00') + timestepmins / 2 # date will be always 2001 for now
    enddate = pd.Timestamp('2002-01-01 00:00:00') - timestepmins / 2
    # timestamps are centered within each timestep, hence the +/- half-step shifts above
    Timestamp = pd.date_range(start=startdate, end=enddate, freq='{}T'.format(str(timestepmins.seconds/60)))
    # getting rid of old time columns and adding the new timestamp + naming the columns
    ill.drop([0, 1, 2], axis=1, inplace=True)
    oldcol = list(ill.columns)
    ill.rename(columns=dict(zip(oldcol, ptsnames)), inplace=True)
    ill['Timestamp'] = Timestamp
    ill.set_index('Timestamp', inplace=True)
    return ill
def view_idf_to_ax(fname=None, idf_txt=None, test=False):
"""This is originally from
This just returns an ax instead of viewing it on order to plot it together with the sensorpoints"""
from geomeppy.view_geometry import _get_collection, _get_collections, _get_surfaces, _get_limits # probably these should not be imported here
from io import StringIO
from eppy.iddcurrent import iddcurrent
from geomeppy import IDF
# type: (Optional[str], Optional[str], Optional[bool]) -> None
if fname and idf_txt:
raise ValueError("Pass either fname or idf_txt, not both.")
# set the IDD for the version of EnergyPlus
iddfhandle = StringIO(iddcurrent.iddtxt)
if IDF.getiddname() is None:
if fname:
# import the IDF
idf = IDF(fname)
elif idf_txt:
idf = IDF()
# create the figure and add the surfaces
ax = plt.axes(projection="3d")
collections = _get_collections(idf, opacity=0.5)
for c in collections:
# calculate and set the axis limits
limits = _get_limits(idf=idf)
return ax
def view_idf_and_ill(p_name, idf_name, sps, ill, save_fig):
from geomeppy.geom.polygons import Polygon3D
"To view the e+ IDF and the DS sensorpoints together. Ill should be a pd.series with time in index"
# TODO implement auto save fig: Save_fig False: no saving, True: save it to /geo
surfcoords = []
polys = []
for surf in sps:
for sp in surf['sensor_points']:
surfcoords.append((sp[0], sp[1], sp[2]))
xs = [c[0] for c in surfcoords]
ys = [c[1] for c in surfcoords]
zs = [c[2] for c in surfcoords]
ax2 = view_idf_to_ax(fname=idf_name, idf_txt=None, test=False)
ax2.scatter(xs, ys, zs, marker='o', s=2, c=ill, cmap='nipy_spectral', vmin=0, vmax=1000)
if save_fig:
plt.savefig('res/vis/{}_{}.png'.format(p_name, str(':', '')))
......@@ -5,6 +5,7 @@ from geomeppy import view_geometry
import os
import sys
sys.path.append(os.path.dirname(os.getcwd()) + r'\\daypym')
from daypym import *
from geomeppy.geom.polygons import (break_polygons, Polygon2D, Polygon3D, Vector2D, Vector3D)
from geomeppy.geom.transformations import align_face, invert_align_face
......@@ -46,10 +47,10 @@ for surface in act_surfs: # consider making this for a function already. The on
ri, ci = pos_in_module(n_row=10, n_col=6, test_grid_transp=test_grid)
sp = create_sensor_points(surf_name=surface['Name'], points_in_poly_2d=pip, row_index=ri, col_index=ci, original_poly=poly, sp_offset=0.01, sp_pos_round=3) # sensor point data dict (daypym)
output = open(r'{}.pkl'.format(project_name), 'wb') # dumping the sps structure to a picke, because we need it later for posprocess
pickle.dump(sps, output)
# use the sensor-point data dicts to make .pts and .kts files (daypym)
#output = open(r'{}.pkl'.format(project_name), 'wb') # dumping the sps structure to a picke, because we need it later for posprocess
#pickle.dump(sps, output)
# use the sensor-point data dicts to make .pts and .kts files (daypym). Alsoe we can save a .csv or .json file with more info for later use
translate_to_ds_pts(surf_sensor_points=sps, p_name=project_name)
## great.. but now it would be nice to visualize if the sps are in the right place:
view_idf_and_sps(p_name=project_name, idf_name=source_idf_name, sps=sps) # plot the sensorpoints and the idf together (daypym-geomeppy)
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import json
import os
import sys
sys.path.append(os.path.dirname(os.getcwd()) + r'\\daypym')
from postprocess import *
from pvmismatch.pvmismatch_lib import pvcell, pvconstants, pvmodule, pvstring, pvsystem
# func to set the irradiance values for the PVMM pv modules
def ill_to_mod_suns(pv_mod, ill_t, si, mi):
    """Collect per-cell suns values for one PVMismatch module.

    Looks up the irradiance of every cell of module ``mi`` in string ``si``
    from ``ill_t`` (keys of the form 's<si>m<mi>c<col>r<row>') and divides
    by 1000 to get suns (assumes the values are W/m2 — confirm upstream).

    :param pv_mod: PVmodule-like object exposing ``cell_pos``.
    :param ill_t: mapping from sensor-point name to irradiance value.
    :param si: string index used to build the lookup keys.
    :param mi: module index used to build the lookup keys.
    :return: tuple ``(Ee, mod_cell_idxs)`` — the suns values and the
        PVMismatch cell indices in the matching flattened order.
    """
    substr_count = len(pv_mod.cell_pos)
    col_count = len(pv_mod.cell_pos[0]) * substr_count
    row_count = len(pv_mod.cell_pos[0][0])
    # cell indices flattened in C order, matching the column/row scan below
    mod_cell_idxs = [cell['idx'] for cell in np.array(pv_mod.cell_pos).flatten()]
    Ee = [
        ill_t['s{}m{}c{}r{}'.format(si, mi, col, row)] / 1000
        for col in range(col_count)
        for row in range(row_count)
    ]
    return Ee, mod_cell_idxs
# project name: derive it from the Daysim header (.hea) file found in the
# current working directory (if several are present, the last one wins)
for f in os.listdir():
    if f.endswith('.hea'):
        project_name = f[:-4]  # strip the '.hea' extension
# read (part of the) irradiance output — a single hard-coded day; the year is
# always 2001 and the timestamps are half-timestep-centered (see ill_to_df)
start = pd.Timestamp('2001-03-06 07:02:30')
end = pd.Timestamp('2001-03-06 19:57:30')
ill = ill_to_df(p_name=project_name)[start:end]
ill_mean = ill.mean(axis=1)  # spatial mean irradiance per timestep, over all sensor points
# read sensor point metadata json (the pts/<project>.json file written at preprocessing time)
with open('pts/{}.json'.format(project_name), 'r') as fp:
    sps = json.load(fp)
# read pv system layout. NOTE(review): assumed one column per string and one
# row (indexed by 'mod_nr') per module — confirm against the csv's producer
system_layout = pd.read_csv(r'{}_pv_sys_layout.csv'.format(project_name), index_col='mod_nr')
str_len = len(system_layout)  # modules per string
str_num = len(system_layout.columns)  # number of strings
# set cell parameters and build the pv modules, strings and system
v_bypass = np.float64(-0.5) # [V] trigger voltage of bypass diode
cell_area = np.float64(246.49) # [cm^2] cell area
# 10 rows x 3 substrings of 2 columns = 60 cells per module
pv_mod_pattern = pvmodule.standard_cellpos_pat(nrows=10, ncols_per_substr=[2]*3)
pv_mod = pvmodule.PVmodule(cell_pos=pv_mod_pattern, pvcells=None, pvconst=None, Vbypass=v_bypass, cellArea=cell_area)
pv_str = pvstring.PVstring(numberMods=str_len, pvmods=[pv_mod]*str_len)
pv_sys = pvsystem.PVsystem(numberStrs=str_num, pvstrs=[pv_str]*str_num, numberMods=[str_len]*str_num, pvmods=[pv_mod]*str_len)
# calulate dc power with cell level and mean irradiance
dc_power = []
for t in ill.index:
for s in list(system_layout.columns):
for m in list(system_layout.index):
Ee , mod_cell_idxs = ill_to_mod_suns(pv_mod=pv_mod, ill_t=ill.loc[t], si=s, mi=m)
pv_sys.setSuns({s:{m:[Ee, mod_cell_idxs]}})
dc_power_ill_mean = []
for t in ill.index:
pv_sys.setSuns(ill_mean[t] / 1000)
system_power = pd.DataFrame(index=ill.index, data={'dc_power_cell_level_irrad':dc_power, 'dc_power_mean_irrad':dc_power_ill_mean})
plt.ylabel('Pdc [W]')
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment