Skip to content

Commit

Permalink
Merge branch 'development' into refactor_NSRDB
Browse files Browse the repository at this point in the history
  • Loading branch information
cdeline committed May 20, 2024
2 parents 04c8c68 + 5ba4d5e commit d880260
Show file tree
Hide file tree
Showing 319 changed files with 171,074 additions and 4,215 deletions.
Binary file added .DS_Store
Binary file not shown.
3 changes: 2 additions & 1 deletion .github/workflows/pytest.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ jobs:
strategy:
fail-fast: false # don't cancel other matrix jobs when one fails
matrix:
python-version: ["3.8"]
python-version: ["3.8","3.11"]
# Test two environments:
# 1) dependencies with pinned versions from requirements.txt
# 2) 'pip install --upgrade --upgrade-strategy=eager .' to install upgraded
Expand Down Expand Up @@ -69,6 +69,7 @@ jobs:
SMARTSPATH: /home/runner/work/bifacial_radiance/bifacial_radiance/SMARTS_295_Linux

- name: Coveralls
if: matrix.python-version == 3.11 # && ${{ matrix.env }} == '-r requirements.txt .[all]'
run: |
coveralls --service=github
env:
Expand Down
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -119,3 +119,7 @@ ENV/

# Rope project settings
.ropeproject

# training
training/tutorials/TEMP/
training/tutorials/Mandy/
Binary file added bifacial_radiance/.DS_Store
Binary file not shown.
Binary file not shown.
18 changes: 18 additions & 0 deletions bifacial_radiance/data/module.json
Original file line number Diff line number Diff line change
@@ -1,4 +1,22 @@
{
"PVmod": {
"bifi": 1,
"glass": false,
"modulefile": "objects/PVmod.rad",
"modulematerial": "black",
"numpanels": 1,
"offsetfromaxis": 0,
"scenex": 2.01,
"sceney": 1.0,
"scenez": 0.1,
"text": "! genbox black PVmod 2 1 0.02 | xform -t -1.0 -0.5 0 -a 1 -t 0 1.0 0",
"x": 2,
"xgap": 0.01,
"y": 1,
"ygap": 0.0,
"z": 0.02,
"zgap": 0.1
},
"PrismSolar-Bi60": {
"bifi": 1,
"cellModule": {
Expand Down
2 changes: 1 addition & 1 deletion bifacial_radiance/gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -389,7 +389,7 @@ def runBifacialRadiance():
analysisParamsDict, cellModuleDict, inputvariablefile = read_valuesfromGUI()

#get a return out of runModelChain and pass it back out of the GUI.
self.data, self.analysis = bifacial_radiance.modelchain.runModelChain(simulationParamsDict=simulationParamsDict,
self.data, analysis = bifacial_radiance.modelchain.runModelChain(simulationParamsDict=simulationParamsDict,
sceneParamsDict=sceneParamsDict,
timeControlParamsDict=timeControlParamsDict,
moduleParamsDict=moduleParamsDict,
Expand Down
198 changes: 123 additions & 75 deletions bifacial_radiance/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,6 @@ def loadTrackerDict(trackerdict, fileprefix=None):
import re, os
import numpy as np


# get list of filenames in \results\
filelist = sorted(os.listdir('results'))

Expand Down Expand Up @@ -297,8 +296,57 @@ def loadTrackerDict(trackerdict, fileprefix=None):
return(trackerdict, totaldict)
#end loadTrackerDict subroutine. set demo.Wm2Front = totaldict.Wm2Front. demo.Wm2Back = totaldict.Wm2Back

def getResults(trackerdict, cumulativesky=False):
    """
    Iterate over trackerdict and return irradiance results
    following analysis1axis runs.

    Entries that have no 'AnalysisObj' key (i.e. analysis was not run for
    that timestamp/angle) are silently skipped.

    Parameters
    ----------
    trackerdict : dict
        trackerdict, after analysis1axis has been run
    cumulativesky : Bool
        determines whether trackerdict index is labeled 'theta' (cumulative
        sky, keyed by tracker angle) or 'timestamp' (hourly simulation)

    Returns
    -------
    results : Pandas.DataFrame
        dataframe containing irradiance scan results, one row per
        AnalysisObj, with duplicated columns removed (first kept).
    """
    import pandas as pd

    # Column label for the trackerdict key: cumulative-sky runs are keyed
    # by tracker angle, hourly runs by timestamp string.
    keyname = 'theta' if cumulativesky else 'timestamp'

    def _printRow(analysisobj, key):
        # One result row: key label + scan results + power results.
        return pd.concat([pd.DataFrame({keyname: key}, index=[0]),
                          analysisobj.getResults(),
                          analysisobj.power_data
                          ], axis=1)

    # Scene/weather metadata copied alongside each row when present.
    extra_columns = ['surf_azm', 'surf_tilt', 'theta', 'temp_air']

    # Accumulate per-row frames and concatenate once at the end: repeated
    # pd.concat inside the loop is quadratic (re-copies the growing frame).
    rows = []
    for key in trackerdict:
        try:
            data_extra = pd.DataFrame(
                {col: trackerdict[key][col]
                 for col in extra_columns if col in trackerdict[key]},
                index=[0])

            for analysis in trackerdict[key]['AnalysisObj']:
                rows.append(pd.concat([_printRow(analysis, key), data_extra],
                                      axis=1))
        except KeyError:
            # No 'AnalysisObj' for this entry -- nothing to report; skip.
            pass

    if not rows:
        return pd.DataFrame(None)

    results = pd.concat(rows, ignore_index=True)
    # Drop duplicated columns (e.g. 'theta' can appear both as the key
    # label and in the per-entry metadata); keep the first occurrence.
    return results.loc[:, ~results.columns.duplicated()]

def _exportTrackerDict(trackerdict, savefile, reindex=False, monthlyyearly=False):
def _exportTrackerDict(trackerdict, savefile, cumulativesky=False, reindex=False, monthlyyearly=False):
"""
Save a TrackerDict output as a ``.csv`` file.
Expand All @@ -320,32 +368,31 @@ def _exportTrackerDict(trackerdict, savefile, reindex=False, monthlyyearly=False

print("Exporting TrackerDict")

# convert trackerdict into dataframe
d = df.from_dict(trackerdict,orient='index',columns=['dni','dhi','ghi', 'temp_air',
'wind_speed', 'theta','surf_tilt','surf_azm',
# Not including the whole distribution because these are not clean..
'POA_eff', 'Gfront_mean',
'Grear_mean',
'Pout_raw', 'Mismatch', 'Pout', 'Pout_Gfront'])
d['measdatetime'] = d.index


# add trackerdict Results (not all simulations will have results)
try:
results = pd.concat([df(data=value['Results'],index=[key]*len(value['Results'])) for (key,value) in trackerdict.items()])
results = results[['rowWanted','modWanted','Wm2Front','Wm2Back']]
d = results.join(d)
except KeyError:
pass
d0 = getResults(trackerdict, cumulativesky)


d0.rename(columns={'Wind Speed':'wind_speed'}, inplace=True)

columnlist = ['timestamp', 'rowNum', 'modNum', 'sceneNum', 'name', 'Wm2Front', 'Wm2Back','DNI','DHI','GHI',
'temp_air', 'wind_speed','theta','surf_tilt','surf_azm', 'POA_eff','Gfront_mean',
'Grear_mean', 'Pout_raw', 'Mismatch', 'Pout', 'Pout_Gfront']
d = df.from_dict(d0).loc[:, d0.columns.isin(columnlist)]
d = d.reindex(columns=[k for k in columnlist])


# TODO: Continue work from here...



if reindex is True: # change to proper timestamp and interpolate to get 8760 output
d['measdatetime'] = d.index
d=d.set_index(pd.to_datetime(d['measdatetime'], format='%Y-%m-%d_%H%M'))

d=d.set_index(pd.to_datetime(d['timestamp'], format='%Y-%m-%d_%H%M'))
try:
d=d.resample('H').asfreq()
except ValueError:
print('Warning: Unable to reindex - possibly duplicate entries in trackerdict')
temp = d.groupby(d.index).mean(numeric_only=True)
d=temp.resample('H').asfreq()
#print('Warning: Unable to reindex - possibly duplicate entries in trackerdict')

# Add tabs:
d.to_csv(savefile)
Expand All @@ -355,59 +402,60 @@ def _exportTrackerDict(trackerdict, savefile, reindex=False, monthlyyearly=False
D2join = pd.DataFrame()
D3join = pd.DataFrame()
D4join = pd.DataFrame()
for rownum in d['rowWanted'].unique():
for modnum in d['modWanted'].unique():
mask = (d['rowWanted']==rownum) & (d['modWanted']==modnum)
print(modnum)
# Gfront_mean.append(filledFront[mask].sum(axis=0).mean())
D2 = d[mask].copy()
D2['timestamp'] = pd.to_datetime(D2['measdatetime'], format="%Y-%m-%d_%H%M")
D2 = D2.set_index('timestamp')
# D2 = D2.set_index(D2['timestamp'])

# Determine if data is sub-hourly
if len(D2) > 1:
if (D2.index[1]-D2.index[0]).total_seconds() / 60 < 60.0:
# Subhourly to hourly data averages, doesn't sum
# So we get average hourly irradiance as well as Wh on
# results of power.
D2b = D2.copy()
D2b = D2b.groupby(pd.PeriodIndex(D2b.index, freq="H")).mean(numeric_only=True).reset_index()
D2b['BGG'] = D2b['Grear_mean']*100/D2b['Gfront_mean']
D2b['BGE'] = (D2b['Pout']-D2b['Pout_Gfront'])*100/D2b['Pout']
D2b['Mismatch'] = (D2b['Pout_raw']-D2b['Pout'])*100/D2b['Pout_raw']
D2b['rowWanted'] = rownum
D2b['modWanted'] = modnum
D2b.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)
D2b=D2b.reset_index()
D2join = pd.concat([D2join, D2b], ignore_index=True, sort=False)

D3 = D2.groupby(pd.PeriodIndex(D2.index, freq="M")).sum().reset_index()
D3['BGG'] = D3['Grear_mean']*100/D3['Gfront_mean']
D3['BGE'] = (D3['Pout']-D3['Pout_Gfront'])*100/D3['Pout']
D3['Mismatch'] = (D3['Pout_raw']-D3['Pout'])*100/D3['Pout_raw']
D3['rowWanted'] = rownum
D3['modWanted'] = modnum
D3m = D2.groupby(pd.PeriodIndex(D2.index, freq="M")).mean(numeric_only=True).reset_index()
D3['temp_air'] = D3m['temp_air']
D3['wind_speed'] = D3m['wind_speed']
D3.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)

D4 = D2.groupby(pd.PeriodIndex(D2.index, freq="Y")).sum().reset_index()
D4['BGG'] = D4['Grear_mean']*100/D4['Gfront_mean']
D4['BGE'] = (D4['Pout']-D4['Pout_Gfront'])*100/D4['Pout']
D4['Mismatch'] = (D4['Pout_raw']-D4['Pout'])*100/D4['Pout_raw']
D4['rowWanted'] = rownum
D4['modWanted'] = modnum
D4m = D2.groupby(pd.PeriodIndex(D2.index, freq="Y")).mean(numeric_only=True).reset_index()
D4['temp_air'] = D4m['temp_air']
D4['wind_speed'] = D4m['wind_speed']
D4.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)

D3=D3.reset_index()
D4=D4.reset_index()
D3join = pd.concat([D3join, D3], ignore_index=True, sort=False)
D4join = pd.concat([D4join, D4], ignore_index=True, sort=False)
for rownum in d['rowNum'].unique():
for modnum in d['modNum'].unique():
for sceneNum in d['sceneNum'].unique():#TODO: is sceneNum iteration required here?
mask = (d['rowNum']==rownum) & (d['modNum']==modnum) & (d['sceneNum']==sceneNum)
print(modnum)
# Gfront_mean.append(filledFront[mask].sum(axis=0).mean())
D2 = d[mask].copy()
D2['timestamp'] = pd.to_datetime(D2['timestamp'], format="%Y-%m-%d_%H%M")
D2 = D2.set_index('timestamp')
# D2 = D2.set_index(D2['timestamp'])

# Determine if data is sub-hourly
if len(D2) > 1:
if (D2.index[1]-D2.index[0]).total_seconds() / 60 < 60.0:
# Subhourly to hourly data averages, doesn't sum
# So we get average hourly irradiance as well as Wh on
# results of power.
D2b = D2.copy()
D2b = D2b.groupby(pd.PeriodIndex(D2b.index, freq="H")).mean(numeric_only=True).reset_index()
D2b['BGG'] = D2b['Grear_mean']*100/D2b['Gfront_mean']
D2b['BGE'] = (D2b['Pout']-D2b['Pout_Gfront'])*100/D2b['Pout']
D2b['Mismatch'] = (D2b['Pout_raw']-D2b['Pout'])*100/D2b['Pout_raw']
D2b['rowNum'] = rownum
D2b['modNum'] = modnum
D2b.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)
D2b=D2b.reset_index()
D2join = pd.concat([D2join, D2b], ignore_index=True, sort=False)

D3 = D2.groupby(pd.PeriodIndex(D2.index, freq="M")).sum(numeric_only=True).reset_index()
D3['BGG'] = D3['Grear_mean']*100/D3['Gfront_mean']
D3['BGE'] = (D3['Pout']-D3['Pout_Gfront'])*100/D3['Pout']
D3['Mismatch'] = (D3['Pout_raw']-D3['Pout'])*100/D3['Pout_raw']
D3['rowNum'] = rownum
D3['modNum'] = modnum
D3m = D2.groupby(pd.PeriodIndex(D2.index, freq="M")).mean(numeric_only=True).reset_index()
D3['temp_air'] = D3m['temp_air']
D3['wind_speed'] = D3m['wind_speed']
D3.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)

D4 = D2.groupby(pd.PeriodIndex(D2.index, freq="Y")).sum(numeric_only=True).reset_index()
D4['BGG'] = D4['Grear_mean']*100/D4['Gfront_mean']
D4['BGE'] = (D4['Pout']-D4['Pout_Gfront'])*100/D4['Pout']
D4['Mismatch'] = (D4['Pout_raw']-D4['Pout'])*100/D4['Pout_raw']
D4['rowNum'] = rownum
D4['modNum'] = modnum
D4m = D2.groupby(pd.PeriodIndex(D2.index, freq="Y")).mean(numeric_only=True).reset_index()
D4['temp_air'] = D4m['temp_air']
D4['wind_speed'] = D4m['wind_speed']
D4.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)

D3=D3.reset_index()
D4=D4.reset_index()
D3join = pd.concat([D3join, D3], ignore_index=True, sort=False)
D4join = pd.concat([D4join, D4], ignore_index=True, sort=False)

savefile2 = savefile[:-4]+'_Hourly.csv'
savefile3 = savefile[:-4]+'_Monthly.csv'
Expand Down
Loading

0 comments on commit d880260

Please sign in to comment.