diff --git a/examples/brick_model_and_sqlite/2_make_rdf.py b/examples/brick_model_and_sqlite/2_make_rdf.py new file mode 100644 index 0000000..05c4bf9 --- /dev/null +++ b/examples/brick_model_and_sqlite/2_make_rdf.py @@ -0,0 +1,101 @@ +import sqlite3 +from rdflib import Graph, Literal, Namespace, RDF, URIRef +from rdflib.namespace import RDFS, XSD + +# Step 1: Set up RDF graph +g = Graph() +brick = Namespace("https://brickschema.org/schema/Brick#") +unit = Namespace("http://qudt.org/vocab/unit/") +ref = Namespace("https://brickschema.org/schema/Reference#") +g.bind("brick", brick) +g.bind("unit", unit) +g.bind("ref", ref) + +# Step 2: Connect to SQLite database +conn = sqlite3.connect("brick_timeseries.db") +cursor = conn.cursor() + +# Step 3: Retrieve timeseries metadata from SQLite database +cursor.execute("SELECT timeseries_id, stored_at FROM TimeseriesReference") +timeseries_refs = cursor.fetchall() + +# Define the database URI +database_uri = URIRef("http://example.org/database") +g.add((database_uri, RDF.type, ref.Database)) +g.add( + ( + database_uri, + RDFS.label, + Literal("SQLite Timeseries Storage", datatype=XSD.string), + ) +) +g.add( + ( + database_uri, + URIRef("http://example.org/connstring"), + Literal("sqlite:///brick_timeseries.db", datatype=XSD.string), + ) +) + +# Step 4: Build RDF model based on the timeseries references +unique_sensors = set() # To track and avoid redundancy +ahu_uris = {} # To track and associate sensors with AHUs + +# List of specific identifiers related to AHU points +ahu_related_identifiers = ["SaStaticSPt", "SaStatic", "SaFanSpeedAO"] + +for timeseries_id, stored_at in timeseries_refs: + timeseries_id = timeseries_id.strip() # Remove any leading/trailing spaces + + # Only process the timeseries if it matches one of the AHU-related identifiers + if any(identifier in timeseries_id for identifier in ahu_related_identifiers): + sensor_uri = URIRef(f"http://example.org/{timeseries_id.replace(' ', '_')}") + + if timeseries_id 
in unique_sensors: + continue # Skip if this sensor has already been processed + unique_sensors.add(timeseries_id) + + # Determine the AHU to which the sensor belongs (assuming it's part of the ID) + ahu_name = timeseries_id.split("_")[0] # Assuming format like 'AHU1_...' + if ahu_name not in ahu_uris: + ahu_uris[ahu_name] = URIRef(f"http://example.org/{ahu_name}") + g.add((ahu_uris[ahu_name], RDF.type, brick.Air_Handling_Unit)) + + # Adjust sensor type and unit based on sensor name + if "StaticSPt" in timeseries_id: + g.add((sensor_uri, RDF.type, brick.Supply_Air_Static_Pressure_Setpoint)) + g.add((sensor_uri, brick.hasUnit, unit.Inch_Water_Column)) + print("StaticSPt added: ", sensor_uri) + elif "SaStatic" in timeseries_id: + g.add((sensor_uri, RDF.type, brick.Supply_Air_Static_Pressure_Sensor)) + g.add((sensor_uri, brick.hasUnit, unit.Inch_Water_Column)) + print("SaStatic added: ", sensor_uri) + elif "SaFanSpeedAO" in timeseries_id: + g.add((sensor_uri, RDF.type, brick.Supply_Fan_VFD_Speed_Sensor)) + g.add((sensor_uri, brick.hasUnit, unit.Percent)) + print("SaFanSpeedAO added: ", sensor_uri) + + # Associate the sensor with the AHU + g.add((ahu_uris[ahu_name], brick.hasPoint, sensor_uri)) + + timeseries_ref_uri = URIRef( + f"http://example.org/timeseries_{timeseries_id.replace(' ', '_')}" + ) + g.add((timeseries_ref_uri, RDF.type, ref.TimeseriesReference)) + g.add( + ( + timeseries_ref_uri, + ref.hasTimeseriesId, + Literal(timeseries_id, datatype=XSD.string), + ) + ) + g.add((timeseries_ref_uri, ref.storedAt, database_uri)) + g.add((sensor_uri, ref.hasExternalReference, timeseries_ref_uri)) + +# Step 5: Serialize the graph to Turtle format +g.serialize("brick_model_with_timeseries.ttl", format="turtle") + +# Close the connection +conn.close() + +print("RDF model created and saved to 'brick_model_with_timeseries.ttl'.") diff --git a/examples/brick_model_and_sqlite/3_run_query_fc1_brick.py b/examples/brick_model_and_sqlite/3_run_query_fc1_brick.py new file mode 
100644 index 0000000..38f2d3e --- /dev/null +++ b/examples/brick_model_and_sqlite/3_run_query_fc1_brick.py @@ -0,0 +1,187 @@ +import sqlite3 +import pandas as pd +from rdflib import Graph, Namespace +import time +from open_fdd.air_handling_unit.faults import FaultConditionOne + +PERCENTAGE_COLS_TO_CONVERT = [ + "Supply_Fan_VFD_Speed_Sensor", # BRICK formatted column name +] + +# Minimal config dict just for fc1 +config_dict_template = { + "INDEX_COL_NAME": "timestamp", + "DUCT_STATIC_COL": "Supply_Air_Static_Pressure_Sensor", + "DUCT_STATIC_SETPOINT_COL": "Supply_Air_Static_Pressure_Setpoint", + "SUPPLY_VFD_SPEED_COL": "Supply_Fan_VFD_Speed_Sensor", + "VFD_SPEED_PERCENT_ERR_THRES": 0.05, + "VFD_SPEED_PERCENT_MAX": 0.99, + "DUCT_STATIC_INCHES_ERR_THRES": 0.1, + "TROUBLESHOOT_MODE": False, + "ROLLING_WINDOW_SIZE": 10, +} + + +def load_rdf_graph(file_path): + print("Loading RDF graph...") + g = Graph() + g.parse(file_path, format="turtle") + return g + + +def run_sparql_query(graph): + print("Running SPARQL query...") + query = """ + PREFIX brick: + PREFIX ref: + + SELECT ?ahu ?sensorType ?sensor WHERE { + ?ahu brick:hasPoint ?sensor . + ?sensor a ?sensorType . + FILTER (?sensorType IN (brick:Supply_Air_Static_Pressure_Sensor, brick:Supply_Air_Static_Pressure_Setpoint, brick:Supply_Fan_VFD_Speed_Sensor)) + } + """ + return graph.query(query) + + +def extract_sensor_data(query_result): + print("SPARQL query completed. 
Checking results...") + sensor_data = {} + for row in query_result: + ahu = str(row.ahu).split("/")[-1] + sensor_type = str(row.sensorType).split("#")[-1] + sensor_data.setdefault(ahu, {})[sensor_type] = row.sensor + print(f"Found sensor for {ahu}: {sensor_type} -> {row.sensor}") + return sensor_data + + +def retrieve_timeseries_data(sensor_data, conn): + dfs = [] + for ahu, sensors in sensor_data.items(): + print(f"Querying SQLite for AHU: {ahu}") + df_ahu = None + for sensor_type, sensor_uri in sensors.items(): + sensor_id = sensor_uri.split("/")[-1] + print(f"Querying SQLite for sensor: {sensor_id} of type: {sensor_type}") + sql_query = """ + SELECT timestamp, value + FROM TimeseriesData + WHERE sensor_name = ? + """ + df_sensor = pd.read_sql_query(sql_query, conn, params=(sensor_id,)) + if df_sensor.empty: + print( + f"No data found for sensor: {sensor_type} with sensor_id: {sensor_id}" + ) + else: + print( + f"Data found for sensor: {sensor_type}, number of records: {len(df_sensor)}" + ) + df_sensor = df_sensor.rename(columns={"value": sensor_type}) + if df_ahu is None: + df_ahu = df_sensor.set_index("timestamp") + else: + df_ahu = pd.merge( + df_ahu, + df_sensor.set_index("timestamp"), + left_index=True, + right_index=True, + ) + if df_ahu is not None: + dfs.append((ahu, df_ahu)) + return dfs + + +def convert_floats(df, columns): + for column in columns: + df[column] = df[column] / 100.0 + print(df.head()) + return df + + +def run_fault_one(config_dict, df): + fc1 = FaultConditionOne(config_dict) + df = fc1.apply(df) + print(f"Total faults detected: {df['fc1_flag'].sum()}") + return df + + +def update_fault_flags_in_db(df, conn, batch_size=1000): + cursor = conn.cursor() + update_data = [(int(row["fc1_flag"]), index) for index, row in df.iterrows()] + + start_time = time.time() + print("Starting batch update...") + + for i in range(0, len(update_data), batch_size): + print(f"Doing batch {i}") + batch = update_data[i : i + batch_size] + cursor.executemany( + 
""" + UPDATE TimeseriesData + SET fc1_flag = ? + WHERE timestamp = ? + """, + batch, + ) + conn.commit() + + elapsed_time = time.time() - start_time + minutes, seconds = divmod(elapsed_time, 60) + print( + f"Batch {i//batch_size + 1} completed: {len(batch)} records updated in {int(minutes)} minutes and {int(seconds)} seconds" + ) + + print("Batch update completed.") + total_records = len(update_data) + total_time = time.time() - start_time + records_per_minute = total_records / (total_time / 60) + print(f"Total records updated: {total_records}") + print( + f"Total time taken: {int(total_time // 60)} minutes and {int(total_time % 60)} seconds" + ) + print(f"Records per minute: {records_per_minute:.2f}") + + +def main(): + # Step 1: Load the RDF graph from the Turtle file + g = load_rdf_graph("brick_model_with_timeseries.ttl") + + # Step 2: Run SPARQL query to find AHUs and their sensors + rdf_result = run_sparql_query(g) + + # Step 3: Extract sensor data from SPARQL query result + sensor_data = extract_sensor_data(rdf_result) + + # Step 4: Connect to SQLite database + print("Connecting to SQLite database...") + conn = sqlite3.connect("brick_timeseries.db") + + # Step 5: Retrieve timeseries data from the database for each AHU + ahu_dataframes = retrieve_timeseries_data(sensor_data, conn) + + # Process each AHU separately + for ahu, df_combined in ahu_dataframes: + print(f"Processing data for AHU: {ahu}") + + if df_combined is not None: + # Step 6: Convert analog outputs to floats + df_combined = convert_floats(df_combined, PERCENTAGE_COLS_TO_CONVERT) + + # Step 7: Customize config_dict for each AHU + config_dict = config_dict_template.copy() + + # Step 8: Run fault condition one + df_combined = run_fault_one(config_dict, df_combined) + + # Step 9: Write the fault flags back to the database + update_fault_flags_in_db(df_combined, conn) + + print(f"columns for {ahu}: \n", df_combined.columns) + + # Close the database connection + conn.close() + + +if __name__ == 
"__main__": + main() diff --git a/examples/brick_model_and_sqlite/tester_for_step_1.py b/examples/brick_model_and_sqlite/tester_for_step_1.py new file mode 100644 index 0000000..b945e63 --- /dev/null +++ b/examples/brick_model_and_sqlite/tester_for_step_1.py @@ -0,0 +1,39 @@ +import sqlite3 +import pandas as pd + +# Connect to the SQLite database +conn = sqlite3.connect("brick_timeseries.db") + +# Query the data +query = """ +SELECT sensor_name, timestamp, value +FROM TimeseriesData +WHERE sensor_name = 'HWR_value' +ORDER BY timestamp ASC +""" +df = pd.read_sql_query(query, conn) + +# Convert the timestamp column to datetime if needed +df["timestamp"] = pd.to_datetime(df["timestamp"]) + +# Set the 'timestamp' column as the index +df.set_index("timestamp", inplace=True) + +# Pivot the DataFrame to make sensor_name the columns and value the data +df_pivot = df.pivot(columns="sensor_name", values="value") + +# Display the DataFrame +print(df_pivot.head()) +print() + +# Display the DataFrame +print("SQL: ", df_pivot.describe()) +print() + +# Close the connection +conn.close() + +# Just for fun see if the CSV file looks any different +csv_file = r"C:\Users\bbartling\Documents\WPCRC_July.csv" +df = pd.read_csv(csv_file) +print("CSV: ", df["HWR_value"].describe()) diff --git a/examples/csv_data_source/ahu_individual_faults.ipynb b/examples/csv_data_source/ahu_individual_faults.ipynb index e07389d..9e7d15f 100644 --- a/examples/csv_data_source/ahu_individual_faults.ipynb +++ b/examples/csv_data_source/ahu_individual_faults.ipynb @@ -17,8 +17,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Note\n", - "This workbook guides you through running fault conditions one at a time and also demonstrates Error Exception Handling in Python. This feature is used within open-fdd to manage cases where incorrect parameters or Pandas DataFrame column names are passed to the fault equations. 
Any errors you see in the code below are intentional, designed to help with troubleshooting." + "## `MissingColumnError` and `InvalidParameterError`\n", + "This workbook guides you through running fault conditions one at a time and also demonstrates **Error Exception Handling** in Python. This feature is used within open-fdd to manage cases where incorrect parameters or Pandas DataFrame column names are passed to the fault equations. Any errors you see in the code below are intentional, designed to help with troubleshooting." ] }, { @@ -91,8 +91,8 @@ "from IPython.display import Image, display\n", "\n", "# Specify the path to your JPEG file\n", - "image_path1 = r\"C:\\Users\\bbartling\\Documents\\MTPD_AHU.jpg\"\n", - "image_path2 = r\"C:\\Users\\bbartling\\Documents\\MTPD_AHU_points.jpg\"\n", + "image_path1 = r\"C:\\Users\\bbartling\\Documents\\AHU.jpg\"\n", + "image_path2 = r\"C:\\Users\\bbartling\\Documents\\AHU_points.jpg\"\n", "\n", "# Display the images\n", "display(Image(filename=image_path1))\n", @@ -321,307 +321,16 @@ ], "source": [ "# Load your data\n", - "ahu_data = r\"C:\\Users\\bbartling\\Documents\\Midtown_PD_Master.csv\"\n", + "ahu_data = r\"C:\\Users\\bbartling\\Documents\\data.csv\"\n", "df = pd.read_csv(ahu_data)\n", "\n", "df.head()" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Check your data frame for empty cell or NaNs\n" - ] - }, { "cell_type": "code", "execution_count": 6, "metadata": {}, - "outputs": [], - "source": [ - "# Check for NaN values in each column\n", - "for col in df.columns:\n", - " if df[col].isnull().any():\n", - " print(f\"NaN values found in column: {col}\")\n", - "\n", - " # Remove rows with any NaN values, then forward and backfill\n", - " df = df.dropna().ffill().bfill()\n", - " print(\"DataFrame has been cleaned for NaNs and has also been forward and backfilled.\")" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - 
"Index(['timestamp', 'HWS_Blr1', 'HWS_Blr2', 'Pump_Speed', 'Bypass_Valve',\n", - " 'Bypass_Valve_Feedback', 'HWS_Temp', 'Oa_Temp', 'Eff_DP_SP', 'HWR_Temp',\n", - " 'Flow_Meter', 'HWS_Eff_SP', 'Blr_SP', 'HWS_High_SP', 'HWS_Low_SP',\n", - " 'Wet_DP', 'Eff_Bypass_SP', 'Blr1_Firing_Rate', 'Blr2_Firing_Rate',\n", - " 'SA_FanVFD', 'EA_FanVFD', 'WheelSpeed', 'DaTemp', 'DX1', 'DX2',\n", - " 'EA_DuctSPt', 'SA_Flow', 'EFF_SaCFM', 'DuctStatic', 'PreCoolTemp',\n", - " 'PreCoolHumidity', 'OaTemp', 'CoolCall_In', 'DX_OA_Enable_SP',\n", - " 'DischargeTemp', 'EA_DamperFB', 'EA_Damper', 'EconHiOASPt', 'EffDaSP',\n", - " 'Eff_DaTempSP', 'HC1_DaTemp', 'HC1_VlvFB', 'HC2_DaTemp', 'HC2_VlvFB',\n", - " 'MA_Temp', 'OA_DamperFB', 'RA_Humidity', 'RA_Flow_SP', 'RA_Flow',\n", - " 'RA_FanSpeed', 'RA_Damper_FB', 'RA_CO2_SP', 'OA_RA_Damper',\n", - " 'VAV_CFM_Total', 'Static_SP', 'SaStatic', 'SA_Flow_CFM', 'SA_FanSpeed',\n", - " 'RA_CO2', 'RA_Temp', 'Freq', 'ActivePower', 'ApparentPower',\n", - " 'ReactivePower', 'Freq1', 'ActiveEnergyDelvd', 'PowerFactor',\n", - " 'Voltage_L_N', 'Voltage_L_L', 'Current', 'B008_SpaceTemp',\n", - " 'TAB_103_SpaceTemp', 'TAB_115_SpaceTemp', 'TAB_125_SpaceTemp'],\n", - " dtype='object')" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "df.columns" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This code snippet attempts to determine the minimum position of the outside air damper, which is a critical input for the analysis. It’s important to note that this AHU includes an Energy Recovery Ventilator (ERV), which may impact the accuracy of this determination. AHUs equipped with ERVs might not have a standard 'minimum outside air' position. Additionally, when using rule-based FDD (Fault Detection and Diagnostics) on systems with ERVs, there is an increased risk of encountering false positives, particularly when the mixing temperatures are influenced by the energy recovery wheel." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "count 1880.000000\n", - "mean 9.202298\n", - "std 11.711833\n", - "min 0.000000\n", - "25% 0.000000\n", - "50% 6.550000\n", - "75% 13.680000\n", - "max 100.000000\n", - "Name: OA_RA_Damper, dtype: float64" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Filter the DataFrame\n", - "filtered_df = df[(df['SA_FanSpeed'] > 15.0) & (df['Oa_Temp'] < 10.0)]\n", - "\n", - "# Get the description of the 'OA_Damper' column\n", - "oa_damper_description = filtered_df['OA_RA_Damper'].describe()\n", - "\n", - "# Print the description\n", - "oa_damper_description" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Data Types:\n", - "timestamp object\n", - "HWS_Blr1 float64\n", - "HWS_Blr2 float64\n", - "Pump_Speed float64\n", - "Bypass_Valve float64\n", - " ... 
\n", - "Current float64\n", - "B008_SpaceTemp float64\n", - "TAB_103_SpaceTemp float64\n", - "TAB_115_SpaceTemp float64\n", - "TAB_125_SpaceTemp float64\n", - "Length: 74, dtype: object\n" - ] - } - ], - "source": [ - "# Checking data types of each column\n", - "print(\"Data Types:\")\n", - "print(df.dtypes)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Max Values:\n", - "HWS_Blr1 142.41\n", - "HWS_Blr2 141.92\n", - "Pump_Speed 93.38\n", - "Bypass_Valve 100.00\n", - "Bypass_Valve_Feedback 99.60\n", - "HWS_Temp 148.36\n", - "Oa_Temp 94.87\n", - "Eff_DP_SP 5.00\n", - "HWR_Temp 139.38\n", - "Flow_Meter 26.25\n", - "HWS_Eff_SP 145.00\n", - "Blr_SP 68.89\n", - "HWS_High_SP 140.00\n", - "HWS_Low_SP 100.00\n", - "Wet_DP 9.57\n", - "Eff_Bypass_SP 10.00\n", - "Blr1_Firing_Rate 89.00\n", - "Blr2_Firing_Rate 85.00\n", - "SA_FanVFD 100.00\n", - "EA_FanVFD 100.00\n", - "WheelSpeed 100.00\n", - "DaTemp 72.87\n", - "DX1 100.00\n", - "DX2 100.00\n", - "EA_DuctSPt 0.60\n", - "SA_Flow 3021.26\n", - "EFF_SaCFM 3000.00\n", - "DuctStatic 0.76\n", - "PreCoolTemp 74.37\n", - "PreCoolHumidity 93.51\n", - "OaTemp 94.87\n", - "CoolCall_In 18.00\n", - "DX_OA_Enable_SP 65.00\n", - "DischargeTemp 74.35\n", - "EA_DamperFB 97.44\n", - "EA_Damper 100.00\n", - "EconHiOASPt 68.00\n", - "EffDaSP 65.00\n", - "Eff_DaTempSP 70.00\n", - "HC1_DaTemp 73.92\n", - "HC1_VlvFB 34.66\n", - "HC2_DaTemp 73.91\n", - "HC2_VlvFB 14.11\n", - "MA_Temp 72.16\n", - "OA_DamperFB 95.25\n", - "RA_Humidity 82.48\n", - "RA_Flow_SP 19990.88\n", - "RA_Flow 15300.72\n", - "RA_FanSpeed 65.25\n", - "RA_Damper_FB 101.17\n", - "RA_CO2_SP 1100.00\n", - "OA_RA_Damper 100.00\n", - "VAV_CFM_Total 10979.18\n", - "Static_SP 0.90\n", - "SaStatic 1.44\n", - "SA_Flow_CFM 19991.18\n", - "SA_FanSpeed 80.25\n", - "RA_CO2 650.94\n", - "RA_Temp 74.16\n", - "Freq 60.13\n", - "ActivePower 43.35\n", - "ApparentPower 56.76\n", 
- "ReactivePower 37.20\n", - "Freq1 60.13\n", - "ActiveEnergyDelvd 979666.38\n", - "PowerFactor 1.99\n", - "Voltage_L_N 282.77\n", - "Voltage_L_L 489.77\n", - "Current 68.84\n", - "B008_SpaceTemp 71.57\n", - "TAB_103_SpaceTemp 74.42\n", - "TAB_115_SpaceTemp 74.60\n", - "TAB_125_SpaceTemp 75.30\n", - "dtype: float64\n" - ] - } - ], - "source": [ - "with pd.option_context('display.max_rows', None, 'display.max_columns', None):\n", - " print(\"\\nMax Values:\")\n", - " print(df.max(numeric_only=True))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This AHU, as observed in the screenshot, features DX cooling controlled by an analog signal, likely indicating a variable-capacity scroll compressor rather than a binary staged compressor. Open-fdd faults, when calculating operating modes, require a cooling signal to identify when the AHU is in a cooling mode. The code snippet below calculates a `cooling_signal` as a Pandas series, with values ranging from 0.0 to 1.0 to represent the percentage command of the analog output.\n", - "\n", - "The calculations might seem unconventional, but they are designed to provide the rule-based logic with an appropriate 'mode' for the AHU based on the DX cooling signals. This calculation ensures that when the DX cooling is active, the AHU is correctly identified as being in a cooling mode. Additionally, some faults check whether the cooling signal is at 100% but there's insufficient drop in the AHU supply air temperature to flag a fault. This calculation ensures that the `cooling_signal` will approach 1.0 or 100% when the DX cooling is fully engaged but not achieving the setpoint." 
- ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " DX1 DX2 cooling_signal\n", - "0 100.00 14.05 0.57025\n", - "1 100.00 22.61 0.61305\n", - "2 100.00 23.33 0.61665\n", - "3 100.00 38.41 0.69205\n", - "4 100.00 38.41 0.69205\n", - "... ... ... ...\n", - "105584 0.00 0.00 0.00000\n", - "105585 72.43 0.00 0.36215\n", - "105586 74.15 0.00 0.37075\n", - "105587 74.05 0.00 0.37025\n", - "105588 0.00 0.00 0.00000\n", - "\n", - "[105589 rows x 3 columns]\n" - ] - } - ], - "source": [ - "# Step 1: Convert DX1 and DX2 to analog signals\n", - "df['DX1_analog'] = df['DX1'] / 100.0 # DX1 is already 0.5 when DX1 is 100\n", - "df['DX2_analog'] = df['DX2'] / 100.0 # DX2 scales from 0.0 to 1.0\n", - "\n", - "# Step 2: Create cooling_signal\n", - "df['cooling_signal'] = df['DX1_analog'] * 0.5 + df['DX2_analog'] * 0.5\n", - "\n", - "# Optional: Drop the intermediate columns if not needed\n", - "df = df.drop(columns=['DX1_analog', 'DX2_analog'])\n", - "\n", - "# View the resulting DataFrame\n", - "print(df[['DX1', 'DX2', 'cooling_signal']])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Caveat on Analog Output Data which would be a unit in percent command or position\n", - "\n", - "As seen above printing the max values, please check columns that represent float values ranging from 0 to 100.0 for the control system's analog outputs. Open-fdd expects these values to be in the range of 0 to 1.0 to represent percentages. Some control system data is provided as 0 to 100.0, while others are in the 0 to 1.0 range. If you pass a 0 to 100.0 value for the control system's analog outputs, the check_analog_pct method will flag this as a `ValueError`. This method checks the data type and range of these values, raising an error if the maximum value exceeds 1.0, ensuring that the data conforms to the expected format. 
Under the hood logic monitors analog outputs to know the operating state (OS) of the AHU based on heating, cooling, and economizer percent commands as well as supply fan speed reference to know when the AHU is running. ASHRAE and NIST use operating states 0, 1, 2, 3 to represent AHU in a heating mode, economizer, economizer plus mechanical cooling, and mechanical cooling only modes.\n", - "\n", - "```python\n", - "SA_FanVFD 100.00\n", - "OA_RA_Damper 100.00\n", - "SA_FanSpeed 80.25\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, "outputs": [ { "data": { @@ -644,7 +353,6 @@ " \n", " \n", " \n", - " timestamp\n", " HWS_Blr1\n", " HWS_Blr2\n", " Pump_Speed\n", @@ -654,7 +362,9 @@ " Oa_Temp\n", " Eff_DP_SP\n", " HWR_Temp\n", + " Flow_Meter\n", " ...\n", + " Freq1\n", " ActiveEnergyDelvd\n", " PowerFactor\n", " Voltage_L_N\n", @@ -664,13 +374,35 @@ " TAB_103_SpaceTemp\n", " TAB_115_SpaceTemp\n", " TAB_125_SpaceTemp\n", - " cooling_signal\n", + " \n", + " \n", + " timestamp\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", " \n", - " 0\n", - " 2023-10-01 00:00:00\n", + " 2023-10-01 00:00:00\n", " 78.34\n", " 85.02\n", " 45.0\n", @@ -680,21 +412,21 @@ " 71.42\n", " 5.0\n", " 78.88\n", + " 0.53\n", " ...\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", + " 0.0\n", " 70.06\n", " 68.21\n", " 68.89\n", " 69.46\n", - " 0.57025\n", " \n", " \n", - " 1\n", - " 2023-10-01 00:05:00\n", + " 2023-10-01 00:05:00\n", " 78.33\n", " 84.97\n", " 45.0\n", @@ -704,21 +436,21 @@ " 71.39\n", " 5.0\n", " 78.86\n", + " 0.53\n", " ...\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", + " 0.0\n", " 70.06\n", " 68.21\n", " 68.89\n", " 69.46\n", - " 0.61305\n", " \n", " \n", - " 2\n", - " 2023-10-01 00:10:00\n", + " 2023-10-01 00:10:00\n", " 78.33\n", " 84.97\n", " 45.0\n", @@ -728,21 
+460,21 @@ " 71.39\n", " 5.0\n", " 78.86\n", + " 0.53\n", " ...\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", + " 0.0\n", " 70.06\n", " 68.21\n", " 68.89\n", " 69.46\n", - " 0.61665\n", " \n", " \n", - " 3\n", - " 2023-10-01 00:15:00\n", + " 2023-10-01 00:15:00\n", " 78.31\n", " 84.93\n", " 45.0\n", @@ -752,21 +484,21 @@ " 71.42\n", " 5.0\n", " 78.80\n", + " 0.53\n", " ...\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", + " 0.0\n", " 70.06\n", " 68.21\n", " 68.89\n", " 69.46\n", - " 0.69205\n", " \n", " \n", - " 4\n", - " 2023-10-01 00:20:00\n", + " 2023-10-01 00:20:00\n", " 78.30\n", " 84.88\n", " 45.0\n", @@ -776,94 +508,430 @@ " 71.52\n", " 5.0\n", " 78.79\n", + " 0.53\n", " ...\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", + " 0.0\n", " 70.06\n", " 68.21\n", " 68.89\n", " 69.46\n", - " 0.69205\n", " \n", " \n", "\n", - "

<p>5 rows × 75 columns</p>

\n", + "

<p>5 rows × 73 columns</p>

\n", "" ], "text/plain": [ - " timestamp HWS_Blr1 HWS_Blr2 Pump_Speed Bypass_Valve \\\n", - "0 2023-10-01 00:00:00 78.34 85.02 45.0 100.0 \n", - "1 2023-10-01 00:05:00 78.33 84.97 45.0 100.0 \n", - "2 2023-10-01 00:10:00 78.33 84.97 45.0 100.0 \n", - "3 2023-10-01 00:15:00 78.31 84.93 45.0 100.0 \n", - "4 2023-10-01 00:20:00 78.30 84.88 45.0 100.0 \n", + " HWS_Blr1 HWS_Blr2 Pump_Speed Bypass_Valve \\\n", + "timestamp \n", + "2023-10-01 00:00:00 78.34 85.02 45.0 100.0 \n", + "2023-10-01 00:05:00 78.33 84.97 45.0 100.0 \n", + "2023-10-01 00:10:00 78.33 84.97 45.0 100.0 \n", + "2023-10-01 00:15:00 78.31 84.93 45.0 100.0 \n", + "2023-10-01 00:20:00 78.30 84.88 45.0 100.0 \n", "\n", - " Bypass_Valve_Feedback HWS_Temp Oa_Temp Eff_DP_SP HWR_Temp ... \\\n", - "0 99.07 77.57 71.42 5.0 78.88 ... \n", - "1 99.08 77.52 71.39 5.0 78.86 ... \n", - "2 99.08 77.52 71.39 5.0 78.86 ... \n", - "3 99.08 77.48 71.42 5.0 78.80 ... \n", - "4 99.08 77.44 71.52 5.0 78.79 ... \n", + " Bypass_Valve_Feedback HWS_Temp Oa_Temp Eff_DP_SP \\\n", + "timestamp \n", + "2023-10-01 00:00:00 99.07 77.57 71.42 5.0 \n", + "2023-10-01 00:05:00 99.08 77.52 71.39 5.0 \n", + "2023-10-01 00:10:00 99.08 77.52 71.39 5.0 \n", + "2023-10-01 00:15:00 99.08 77.48 71.42 5.0 \n", + "2023-10-01 00:20:00 99.08 77.44 71.52 5.0 \n", "\n", - " ActiveEnergyDelvd PowerFactor Voltage_L_N Voltage_L_L Current \\\n", - "0 0.0 0.0 0.0 0.0 0.0 \n", - "1 0.0 0.0 0.0 0.0 0.0 \n", - "2 0.0 0.0 0.0 0.0 0.0 \n", - "3 0.0 0.0 0.0 0.0 0.0 \n", - "4 0.0 0.0 0.0 0.0 0.0 \n", + " HWR_Temp Flow_Meter ... Freq1 ActiveEnergyDelvd \\\n", + "timestamp ... \n", + "2023-10-01 00:00:00 78.88 0.53 ... 0.0 0.0 \n", + "2023-10-01 00:05:00 78.86 0.53 ... 0.0 0.0 \n", + "2023-10-01 00:10:00 78.86 0.53 ... 0.0 0.0 \n", + "2023-10-01 00:15:00 78.80 0.53 ... 0.0 0.0 \n", + "2023-10-01 00:20:00 78.79 0.53 ... 
0.0 0.0 \n", + "\n", + " PowerFactor Voltage_L_N Voltage_L_L Current \\\n", + "timestamp \n", + "2023-10-01 00:00:00 0.0 0.0 0.0 0.0 \n", + "2023-10-01 00:05:00 0.0 0.0 0.0 0.0 \n", + "2023-10-01 00:10:00 0.0 0.0 0.0 0.0 \n", + "2023-10-01 00:15:00 0.0 0.0 0.0 0.0 \n", + "2023-10-01 00:20:00 0.0 0.0 0.0 0.0 \n", "\n", - " B008_SpaceTemp TAB_103_SpaceTemp TAB_115_SpaceTemp TAB_125_SpaceTemp \\\n", - "0 70.06 68.21 68.89 69.46 \n", - "1 70.06 68.21 68.89 69.46 \n", - "2 70.06 68.21 68.89 69.46 \n", - "3 70.06 68.21 68.89 69.46 \n", - "4 70.06 68.21 68.89 69.46 \n", + " B008_SpaceTemp TAB_103_SpaceTemp TAB_115_SpaceTemp \\\n", + "timestamp \n", + "2023-10-01 00:00:00 70.06 68.21 68.89 \n", + "2023-10-01 00:05:00 70.06 68.21 68.89 \n", + "2023-10-01 00:10:00 70.06 68.21 68.89 \n", + "2023-10-01 00:15:00 70.06 68.21 68.89 \n", + "2023-10-01 00:20:00 70.06 68.21 68.89 \n", "\n", - " cooling_signal \n", - "0 0.57025 \n", - "1 0.61305 \n", - "2 0.61665 \n", - "3 0.69205 \n", - "4 0.69205 \n", + " TAB_125_SpaceTemp \n", + "timestamp \n", + "2023-10-01 00:00:00 69.46 \n", + "2023-10-01 00:05:00 69.46 \n", + "2023-10-01 00:10:00 69.46 \n", + "2023-10-01 00:15:00 69.46 \n", + "2023-10-01 00:20:00 69.46 \n", "\n", - "[5 rows x 75 columns]" + "[5 rows x 73 columns]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Convert the timestamp column to datetime and set it as the index\n", + "df[\"timestamp\"] = pd.to_datetime(df[\"timestamp\"])\n", + "df.set_index(\"timestamp\", inplace=True)\n", + "\n", + "df.head()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Open-fdd also includes a method to apply rolling averages to your data if necessary. ASHRAE recommends using 1-minute sampled data for 5-minute rolling averages, but you can skip this step, as the method will automatically handle cases where the data is not sampled at 1 minute or less. 
Refer to the print statements or `warnings` to see how the data is affected.\n", + "\n", + "You can also specify the rolling average interval when using the method, which operates as described under the hood. While ASHRAE recommends 5-minute rolling averages on 1-minute data, obtaining such data can sometimes be challenging.\n", + "\n", + "```python\n", + "def apply_rolling_average_if_needed(self, df, freq=\"1min\", rolling_window=\"5min\")\n", + "```\n", + "\n", + "You can view the method here if desired.\n", + "https://github.com/bbartling/open-fdd/blob/master/open_fdd/air_handling_unit/faults/shared_utils.py#L45\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: If data has a one minute or less sampling frequency a rolling average will be automatically applied\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: Median time difference between consecutive timestamps is 0 days 00:05:00.\n", + "Warning: Skipping any rolling averaging...\n" + ] + } + ], + "source": [ + "from open_fdd.air_handling_unit.faults.helper_utils import HelperUtils\n", + "\n", + "utils = HelperUtils()\n", + "\n", + "df = utils.apply_rolling_average_if_needed(df)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Check your data frame for empty cell or NaNs\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# Check for NaN values in each column\n", + "for col in df.columns:\n", + " if df[col].isnull().any():\n", + " print(f\"NaN values found in column: {col}\")\n", + "\n", + " # Remove rows with any NaN values, then forward and backfill\n", + " df = df.dropna().ffill().bfill()\n", + " print(\"DataFrame has been cleaned for NaNs and has also been forward and backfilled.\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + 
"outputs": [ + { + "data": { + "text/plain": [ + "Index(['HWS_Blr1', 'HWS_Blr2', 'Pump_Speed', 'Bypass_Valve',\n", + " 'Bypass_Valve_Feedback', 'HWS_Temp', 'Oa_Temp', 'Eff_DP_SP', 'HWR_Temp',\n", + " 'Flow_Meter', 'HWS_Eff_SP', 'Blr_SP', 'HWS_High_SP', 'HWS_Low_SP',\n", + " 'Wet_DP', 'Eff_Bypass_SP', 'Blr1_Firing_Rate', 'Blr2_Firing_Rate',\n", + " 'SA_FanVFD', 'EA_FanVFD', 'WheelSpeed', 'DaTemp', 'DX1', 'DX2',\n", + " 'EA_DuctSPt', 'SA_Flow', 'EFF_SaCFM', 'DuctStatic', 'PreCoolTemp',\n", + " 'PreCoolHumidity', 'OaTemp', 'CoolCall_In', 'DX_OA_Enable_SP',\n", + " 'DischargeTemp', 'EA_DamperFB', 'EA_Damper', 'EconHiOASPt', 'EffDaSP',\n", + " 'Eff_DaTempSP', 'HC1_DaTemp', 'HC1_VlvFB', 'HC2_DaTemp', 'HC2_VlvFB',\n", + " 'MA_Temp', 'OA_DamperFB', 'RA_Humidity', 'RA_Flow_SP', 'RA_Flow',\n", + " 'RA_FanSpeed', 'RA_Damper_FB', 'RA_CO2_SP', 'OA_RA_Damper',\n", + " 'VAV_CFM_Total', 'Static_SP', 'SaStatic', 'SA_Flow_CFM', 'SA_FanSpeed',\n", + " 'RA_CO2', 'RA_Temp', 'Freq', 'ActivePower', 'ApparentPower',\n", + " 'ReactivePower', 'Freq1', 'ActiveEnergyDelvd', 'PowerFactor',\n", + " 'Voltage_L_N', 'Voltage_L_L', 'Current', 'B008_SpaceTemp',\n", + " 'TAB_103_SpaceTemp', 'TAB_115_SpaceTemp', 'TAB_125_SpaceTemp'],\n", + " dtype='object')" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df.columns" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This code snippet attempts to determine the minimum position of the outside air damper, which is a critical input for the analysis. It’s important to note that this AHU includes an Energy Recovery Ventilator (ERV), which may impact the accuracy of this determination. AHUs equipped with ERVs might not have a standard 'minimum outside air' position. 
Additionally, when using rule-based FDD (Fault Detection and Diagnostics) on systems with ERVs, there is an increased risk of encountering false positives, particularly when the mixing temperatures are influenced by the energy recovery wheel." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "count 1880.000000\n", + "mean 9.202298\n", + "std 11.711833\n", + "min 0.000000\n", + "25% 0.000000\n", + "50% 6.550000\n", + "75% 13.680000\n", + "max 100.000000\n", + "Name: OA_RA_Damper, dtype: float64" ] }, - "execution_count": 12, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# convert only the AHU data\n", - "\n", - "# the floats between 0.0 and 100.0 so we need to convert to 0.0 and 1.0 ranges\n", - "percentage_columns = [\n", - " [\"SA_FanVFD\"],\n", - " [\"OA_RA_Damper\"],\n", - " [\"SA_FanSpeed\"],\n", - "]\n", + "# Filter the DataFrame\n", + "filtered_df = df[(df['SA_FanSpeed'] > 15.0) & (df['Oa_Temp'] < 10.0)]\n", "\n", - "for col in percentage_columns:\n", - " df[col] = df[col] / 100.0\n", + "# Get the description of the 'OA_Damper' column\n", + "oa_damper_description = filtered_df['OA_RA_Damper'].describe()\n", "\n", - "df.head()" + "# Print the description\n", + "oa_damper_description" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Data Types:\n", + "HWS_Blr1 float64\n", + "HWS_Blr2 float64\n", + "Pump_Speed float64\n", + "Bypass_Valve float64\n", + "Bypass_Valve_Feedback float64\n", + " ... 
\n", + "Current float64\n", + "B008_SpaceTemp float64\n", + "TAB_103_SpaceTemp float64\n", + "TAB_115_SpaceTemp float64\n", + "TAB_125_SpaceTemp float64\n", + "Length: 73, dtype: object\n" + ] + } + ], + "source": [ + "# Checking data types of each column\n", + "print(\"Data Types:\")\n", + "print(df.dtypes)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Max Values:\n", + "HWS_Blr1 142.41\n", + "HWS_Blr2 141.92\n", + "Pump_Speed 93.38\n", + "Bypass_Valve 100.00\n", + "Bypass_Valve_Feedback 99.60\n", + "HWS_Temp 148.36\n", + "Oa_Temp 94.87\n", + "Eff_DP_SP 5.00\n", + "HWR_Temp 139.38\n", + "Flow_Meter 26.25\n", + "HWS_Eff_SP 145.00\n", + "Blr_SP 68.89\n", + "HWS_High_SP 140.00\n", + "HWS_Low_SP 100.00\n", + "Wet_DP 9.57\n", + "Eff_Bypass_SP 10.00\n", + "Blr1_Firing_Rate 89.00\n", + "Blr2_Firing_Rate 85.00\n", + "SA_FanVFD 100.00\n", + "EA_FanVFD 100.00\n", + "WheelSpeed 100.00\n", + "DaTemp 72.87\n", + "DX1 100.00\n", + "DX2 100.00\n", + "EA_DuctSPt 0.60\n", + "SA_Flow 3021.26\n", + "EFF_SaCFM 3000.00\n", + "DuctStatic 0.76\n", + "PreCoolTemp 74.37\n", + "PreCoolHumidity 93.51\n", + "OaTemp 94.87\n", + "CoolCall_In 18.00\n", + "DX_OA_Enable_SP 65.00\n", + "DischargeTemp 74.35\n", + "EA_DamperFB 97.44\n", + "EA_Damper 100.00\n", + "EconHiOASPt 68.00\n", + "EffDaSP 65.00\n", + "Eff_DaTempSP 70.00\n", + "HC1_DaTemp 73.92\n", + "HC1_VlvFB 34.66\n", + "HC2_DaTemp 73.91\n", + "HC2_VlvFB 14.11\n", + "MA_Temp 72.16\n", + "OA_DamperFB 95.25\n", + "RA_Humidity 82.48\n", + "RA_Flow_SP 19990.88\n", + "RA_Flow 15300.72\n", + "RA_FanSpeed 65.25\n", + "RA_Damper_FB 101.17\n", + "RA_CO2_SP 1100.00\n", + "OA_RA_Damper 100.00\n", + "VAV_CFM_Total 10979.18\n", + "Static_SP 0.90\n", + "SaStatic 1.44\n", + "SA_Flow_CFM 19991.18\n", + "SA_FanSpeed 80.25\n", + "RA_CO2 650.94\n", + "RA_Temp 74.16\n", + "Freq 60.13\n", + "ActivePower 43.35\n", + "ApparentPower 56.76\n", 
+ "ReactivePower 37.20\n", + "Freq1 60.13\n", + "ActiveEnergyDelvd 979666.38\n", + "PowerFactor 1.99\n", + "Voltage_L_N 282.77\n", + "Voltage_L_L 489.77\n", + "Current 68.84\n", + "B008_SpaceTemp 71.57\n", + "TAB_103_SpaceTemp 74.42\n", + "TAB_115_SpaceTemp 74.60\n", + "TAB_125_SpaceTemp 75.30\n", + "dtype: float64\n" + ] + } + ], + "source": [ + "with pd.option_context('display.max_rows', None, 'display.max_columns', None):\n", + " print(\"\\nMax Values:\")\n", + " print(df.max(numeric_only=True))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Set the time stamp column as the Pandas DataFrame Index" + "This AHU, as observed in the screenshot, features DX cooling controlled by an analog signal, likely indicating a variable-capacity scroll compressor rather than a binary staged compressor. Open-fdd faults, when calculating operating modes, require a cooling signal to identify when the AHU is in a cooling mode. The code snippet below calculates a `cooling_signal` as a Pandas series, with values ranging from 0.0 to 1.0 to represent the percentage command of the analog output.\n", + "\n", + "The calculations might seem unconventional, but they are designed to provide the rule-based logic with an appropriate 'mode' for the AHU based on the DX cooling signals. This calculation ensures that when the DX cooling is active, the AHU is correctly identified as being in a cooling mode. Additionally, some faults check whether the cooling signal is at 100% but there's insufficient drop in the AHU supply air temperature to flag a fault. This calculation ensures that the `cooling_signal` will approach 1.0 or 100% when the DX cooling is fully engaged but not achieving the setpoint." 
] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " DX1 DX2 cooling_signal\n", + "timestamp \n", + "2023-10-01 00:00:00 100.00 14.05 0.57025\n", + "2023-10-01 00:05:00 100.00 22.61 0.61305\n", + "2023-10-01 00:10:00 100.00 23.33 0.61665\n", + "2023-10-01 00:15:00 100.00 38.41 0.69205\n", + "2023-10-01 00:20:00 100.00 38.41 0.69205\n", + "... ... ... ...\n", + "2024-07-31 23:35:00 0.00 0.00 0.00000\n", + "2024-07-31 23:40:00 72.43 0.00 0.36215\n", + "2024-07-31 23:45:00 74.15 0.00 0.37075\n", + "2024-07-31 23:50:00 74.05 0.00 0.37025\n", + "2024-07-31 23:55:00 0.00 0.00 0.00000\n", + "\n", + "[105589 rows x 3 columns]\n" + ] + } + ], + "source": [ + "# Step 1: Convert DX1 and DX2 to analog signals\n", + "df['DX1_analog'] = df['DX1'] / 100.0 # DX1 is already 0.5 when DX1 is 100\n", + "df['DX2_analog'] = df['DX2'] / 100.0 # DX2 scales from 0.0 to 1.0\n", + "\n", + "# Step 2: Create cooling_signal\n", + "df['cooling_signal'] = df['DX1_analog'] * 0.5 + df['DX2_analog'] * 0.5\n", + "\n", + "# Optional: Drop the intermediate columns if not needed\n", + "df = df.drop(columns=['DX1_analog', 'DX2_analog'])\n", + "\n", + "# View the resulting DataFrame\n", + "print(df[['DX1', 'DX2', 'cooling_signal']])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Caveat on Analog Output Data which would be a unit in percent command or position\n", + "\n", + "As seen above printing the max values, please check columns that represent float values ranging from 0 to 100.0 for the control system's analog outputs. Open-fdd expects these values to be in the range of 0 to 1.0 to represent percentages. Some control system data is provided as 0 to 100.0, while others are in the 0 to 1.0 range. If you pass a 0 to 100.0 value for the control system's analog outputs, the check_analog_pct method will flag this as a `ValueError`. 
This method checks the data type and range of these values, raising an error if the maximum value exceeds 1.0, ensuring that the data conforms to the expected format. Under the hood logic monitors analog outputs to know the operating state (OS) of the AHU based on heating, cooling, and economizer percent commands as well as supply fan speed reference to know when the AHU is running. ASHRAE and NIST use operating states 0, 1, 2, 3 to represent AHU in a heating mode, economizer, economizer plus mechanical cooling, and mechanical cooling only modes.\n", + "\n", + "```python\n", + "SA_FanVFD 100.00\n", + "OA_RA_Damper 100.00\n", + "SA_FanSpeed 80.25\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, "outputs": [ { "data": { @@ -1111,19 +1179,34 @@ "[5 rows x 74 columns]" ] }, - "execution_count": 13, + "execution_count": 14, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# Convert the timestamp column to datetime and set it as the index\n", - "df[\"timestamp\"] = pd.to_datetime(df[\"timestamp\"])\n", - "df.set_index(\"timestamp\", inplace=True)\n", + "# convert only the AHU data\n", + "\n", + "# the floats between 0.0 and 100.0 so we need to convert to 0.0 and 1.0 ranges\n", + "percentage_columns = [\n", + " [\"SA_FanVFD\"],\n", + " [\"OA_RA_Damper\"],\n", + " [\"SA_FanSpeed\"],\n", + "]\n", + "\n", + "for col in percentage_columns:\n", + " df[col] = df[col] / 100.0\n", "\n", "df.head()" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Set the time stamp column as the Pandas DataFrame Index" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -1137,7 +1220,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ @@ -1210,7 +1293,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -1221,7 +1304,7 @@ }, { "cell_type": "code", - "execution_count": 
16, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -1254,7 +1337,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 18, "metadata": {}, "outputs": [ { @@ -1309,7 +1392,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 19, "metadata": {}, "outputs": [ { @@ -1389,7 +1472,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 20, "metadata": {}, "outputs": [ { @@ -1459,7 +1542,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 21, "metadata": {}, "outputs": [ { @@ -1553,7 +1636,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 22, "metadata": {}, "outputs": [ { @@ -1603,7 +1686,7 @@ " dtype='object')" ] }, - "execution_count": 21, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } @@ -1888,7 +1971,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 27, "metadata": {}, "outputs": [ { @@ -1966,7 +2049,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 28, "metadata": {}, "outputs": [ { @@ -2027,7 +2110,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 29, "metadata": {}, "outputs": [ { @@ -2088,7 +2171,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 30, "metadata": {}, "outputs": [ { @@ -2149,7 +2232,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 31, "metadata": {}, "outputs": [ { @@ -2217,7 +2300,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 32, "metadata": {}, "outputs": [ { @@ -2227,7 +2310,7 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mMissingColumnError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[36], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m 
\u001b[38;5;21;01mopen_fdd\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mair_handling_unit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mreports\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m FaultCodeFourteenReport\n\u001b[0;32m 4\u001b[0m \u001b[38;5;66;03m# Create an instance of FaultConditionFourteen\u001b[39;00m\n\u001b[1;32m----> 5\u001b[0m fc14 \u001b[38;5;241m=\u001b[39m \u001b[43mFaultConditionFourteen\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig_dict\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;66;03m# Retrieve and print the required columns for this fault condition\u001b[39;00m\n\u001b[0;32m 8\u001b[0m fc14_required_columns \u001b[38;5;241m=\u001b[39m fc14\u001b[38;5;241m.\u001b[39mget_required_columns()\n", + "Cell \u001b[1;32mIn[32], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mopen_fdd\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mair_handling_unit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mreports\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m FaultCodeFourteenReport\n\u001b[0;32m 4\u001b[0m \u001b[38;5;66;03m# Create an instance of FaultConditionFourteen\u001b[39;00m\n\u001b[1;32m----> 5\u001b[0m fc14 \u001b[38;5;241m=\u001b[39m \u001b[43mFaultConditionFourteen\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig_dict\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;66;03m# Retrieve and print the required columns for this fault condition\u001b[39;00m\n\u001b[0;32m 8\u001b[0m fc14_required_columns \u001b[38;5;241m=\u001b[39m fc14\u001b[38;5;241m.\u001b[39mget_required_columns()\n", "File \u001b[1;32mc:\\Users\\bbartling\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\open_fdd\\air_handling_unit\\faults\\__init__.py:1994\u001b[0m, in \u001b[0;36mFaultConditionFourteen.__init__\u001b[1;34m(self, dict_)\u001b[0m\n\u001b[0;32m 1992\u001b[0m \u001b[38;5;66;03m# Check if any of 
the required columns are None\u001b[39;00m\n\u001b[0;32m 1993\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28many\u001b[39m(col \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01mfor\u001b[39;00m col \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrequired_columns):\n\u001b[1;32m-> 1994\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m MissingColumnError(\n\u001b[0;32m 1995\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39merror_string\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 1996\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mequation_string\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 1997\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdescription_string\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 1998\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrequired_column_description\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 1999\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrequired_columns\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 2000\u001b[0m )\n\u001b[0;32m 2002\u001b[0m \u001b[38;5;66;03m# Ensure all required columns are strings\u001b[39;00m\n\u001b[0;32m 2003\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrequired_columns \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28mstr\u001b[39m(col) 
\u001b[38;5;28;01mfor\u001b[39;00m col \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrequired_columns]\n", "\u001b[1;31mMissingColumnError\u001b[0m: One or more required columns are missing or None \nfc14_flag = 1 if ΔT_coil >= √(εcoil_enter² + εcoil_leave²) + ΔT_fan in inactive cooling coil mode for N consecutive values else 0 \nFault Condition 14: Temperature drop across inactive cooling coil detected, requiring coil leaving temperature sensor \nRequired inputs are the cooling coil entering temperature, cooling coil leaving temperature, cooling signal, heating signal, economizer signal, and supply fan VFD speed \n[None, None, 'cooling_signal', None, 'OA_RA_Damper', 'SA_FanSpeed']" ] @@ -2270,7 +2353,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 33, "metadata": {}, "outputs": [ { @@ -2280,7 +2363,7 @@ "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mInvalidParameterError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[37], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mopen_fdd\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mair_handling_unit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mreports\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m FaultCodeFifteenReport\n\u001b[0;32m 4\u001b[0m \u001b[38;5;66;03m# Create an instance of FaultConditionFifteen\u001b[39;00m\n\u001b[1;32m----> 5\u001b[0m fc15 \u001b[38;5;241m=\u001b[39m \u001b[43mFaultConditionFifteen\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig_dict\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;66;03m# Retrieve and print the required columns for this fault condition\u001b[39;00m\n\u001b[0;32m 8\u001b[0m fc15_required_columns \u001b[38;5;241m=\u001b[39m fc15\u001b[38;5;241m.\u001b[39mget_required_columns()\n", + "Cell 
\u001b[1;32mIn[33], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mopen_fdd\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mair_handling_unit\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mreports\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m FaultCodeFifteenReport\n\u001b[0;32m 4\u001b[0m \u001b[38;5;66;03m# Create an instance of FaultConditionFifteen\u001b[39;00m\n\u001b[1;32m----> 5\u001b[0m fc15 \u001b[38;5;241m=\u001b[39m \u001b[43mFaultConditionFifteen\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig_dict\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 7\u001b[0m \u001b[38;5;66;03m# Retrieve and print the required columns for this fault condition\u001b[39;00m\n\u001b[0;32m 8\u001b[0m fc15_required_columns \u001b[38;5;241m=\u001b[39m fc15\u001b[38;5;241m.\u001b[39mget_required_columns()\n", "File \u001b[1;32mc:\\Users\\bbartling\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\open_fdd\\air_handling_unit\\faults\\__init__.py:2110\u001b[0m, in \u001b[0;36mFaultConditionFifteen.__init__\u001b[1;34m(self, dict_)\u001b[0m\n\u001b[0;32m 2104\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m param, value \u001b[38;5;129;01min\u001b[39;00m [\n\u001b[0;32m 2105\u001b[0m (\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdelta_supply_fan\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdelta_supply_fan),\n\u001b[0;32m 2106\u001b[0m (\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcoil_temp_enter_err_thres\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcoil_temp_enter_err_thres),\n\u001b[0;32m 2107\u001b[0m (\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcoil_temp_leav_err_thres\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcoil_temp_leav_err_thres),\n\u001b[0;32m 2108\u001b[0m ]:\n\u001b[0;32m 2109\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
\u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(value, \u001b[38;5;28mfloat\u001b[39m):\n\u001b[1;32m-> 2110\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m InvalidParameterError(\n\u001b[0;32m 2111\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mThe parameter \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mparam\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m should be a float, but got \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(value)\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 2112\u001b[0m )\n\u001b[0;32m 2114\u001b[0m \u001b[38;5;66;03m# Other attributes\u001b[39;00m\n\u001b[0;32m 2115\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhtg_coil_enter_temp_col \u001b[38;5;241m=\u001b[39m dict_\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mHTG_COIL_ENTER_TEMP_COL\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m)\n", "\u001b[1;31mInvalidParameterError\u001b[0m: The parameter 'delta_supply_fan' should be a float, but got NoneType." 
] @@ -2321,9 +2404,16 @@ " print(\"No faults found.\")\n" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "These are the faults we are left to work with that did not error out" + ] + }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 34, "metadata": {}, "outputs": [ { @@ -2351,7 +2441,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 35, "metadata": {}, "outputs": [ { @@ -2369,7 +2459,7 @@ " 'fc13_fault_sum': np.int64(0)}" ] }, - "execution_count": 39, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } @@ -2389,7 +2479,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 36, "metadata": {}, "outputs": [ { @@ -2453,7 +2543,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 37, "metadata": {}, "outputs": [ { @@ -2499,7 +2589,36 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "See other examples of reporting features built into open-fdd for plotting specific equation data. If needed, you can also use any Python-based data visualization tools, such as those available in Pandas. However, open-fdd includes built-in reporting features designed to make data analysis easier and faster." + "Heat maps are effective tools for highlighting potential issues that may be influenced by the time of year, particularly for an AHU, where outdoor conditions significantly impact its operating mode. Some AHU programming may incorporate different PID tuning parameters for various operating states. While one mode might transition smoothly between mechanical cooling, economizing, or heating, improper tuning across all states can lead to oscillations or hunting, especially if adjustments were not made for each operating condition." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Similarly, as demonstrated in this example, when there is an overwhelming amount of data, it can be difficult to pinpoint specific issues. 
To address this, consider analyzing faults on a seasonal or monthly basis, which allows for a more focused examination and clearer identification of underlying problems.\n", + "\n", + "For example you could do something along the lines of this below to create a smaller dataset to work with.\n", + "\n", + "```python\n", + "# Create DataFrame\n", + "df = pd.DataFrame(data)\n", + "\n", + "# Convert timestamp to datetime\n", + "df[\"timestamp\"] = pd.to_datetime(df[\"timestamp\"])\n", + "\n", + "# Set the timestamp as the index\n", + "df.set_index(\"timestamp\", inplace=True)\n", + "\n", + "# Example of breaking out the DataFrame by month\n", + "dfs_by_month = {month: data for month, data in df.groupby(df.index.to_period(\"M\"))}\n", + "\n", + "# Example of accessing the DataFrame for March 2023\n", + "march_df = dfs_by_month[pd.Period(\"2023-03\")]\n", + "\n", + "print(\"March 2023 DataFrame:\")\n", + "print(march_df)\n", + "```" ] } ], diff --git a/examples/csv_data_source/ahu_individual_faults_blank.ipynb b/examples/csv_data_source/ahu_individual_faults_blank.ipynb index 3dfb023..83600cb 100644 --- a/examples/csv_data_source/ahu_individual_faults_blank.ipynb +++ b/examples/csv_data_source/ahu_individual_faults_blank.ipynb @@ -18,6 +18,25 @@ "# pip install open-fdd --upgrade" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# display BAS screenshots\n", + "\n", + "from IPython.display import Image, display\n", + "\n", + "# Specify the path to your JPEG file\n", + "image_path1 = r\"C:\\Users\\bbartling\\Documents\\AHU.jpg\"\n", + "image_path2 = r\"C:\\Users\\bbartling\\Documents\\AHU_points.jpg\"\n", + "\n", + "# Display the images\n", + "display(Image(filename=image_path1))\n", + "display(Image(filename=image_path2))" + ] + }, { "cell_type": "code", "execution_count": 25, @@ -35,12 +54,57 @@ "outputs": [], "source": [ "# Load your data\n", - "ahu_data = 
r\"C:\\Users\\bbartling\\Documents\\Midtown_PD_Master.csv\"\n", + "ahu_data = r\"C:\\Users\\bbartling\\Documents\\data.csv\"\n", "df = pd.read_csv(ahu_data)\n", "\n", "df.head()" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df[\"timestamp\"] = pd.to_datetime(df[\"timestamp\"])\n", + "df.set_index(\"timestamp\", inplace=True)\n", + "\n", + "df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# OPTIONAL is make smaller monthly datasets\n", + "\n", + "'''\n", + "# Example of breaking out the DataFrame by month\n", + "dfs_by_month = {month: data for month, data in df.groupby(df.index.to_period(\"M\"))}\n", + "\n", + "# Example of accessing the DataFrame for March 2023\n", + "march_df = dfs_by_month[pd.Period(\"2023-03\")]\n", + "\n", + "print(\"March 2023 DataFrame:\")\n", + "print(march_df)\n", + "'''" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from open_fdd.air_handling_unit.faults.helper_utils import HelperUtils\n", + "\n", + "utils = HelperUtils()\n", + "\n", + "df = utils.apply_rolling_average_if_needed(df)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -69,7 +133,11 @@ "# Get the description of the 'OA_Damper' column\n", "oa_damper_description = filtered_df['OA_RA_Damper'].describe()\n", "\n", - "# Print the description\n", + "'''\n", + "this can potentially be used to find the AHU MIN OA damper position\n", + "which is a required input in the config_dict below as a float\n", + "between 0.0 and 1.0. 
IE., an AHU with a min OA of 20% would be 0.2\n", + "'''\n", "oa_damper_description" ] }, @@ -101,7 +169,7 @@ "metadata": {}, "outputs": [], "source": [ - "# OPTIONAL CONVERT AO's based on above info\n", + "# OPTIONAL CONVERT AO's based on above info if they are floats between 0.0 and 100.0\n", "\n", "'''\n", "\n", @@ -120,19 +188,6 @@ "'''" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Convert the timestamp column to datetime and set it as the index\n", - "df[\"timestamp\"] = pd.to_datetime(df[\"timestamp\"])\n", - "df.set_index(\"timestamp\", inplace=True)\n", - "\n", - "df.head()" - ] - }, { "cell_type": "code", "execution_count": null, @@ -200,8 +255,8 @@ " 'DELTA_T_SUPPLY_FAN': 2.0,\n", "\n", " 'DELTA_OS_MAX': 3,\n", - " 'AHU_MIN_OA_DPR': 0.20, # Found from the previous summary stats\n", - " 'OAT_RAT_DELTA_MIN': 10, # Intentional as type int to show new error handling in fc6\n", + " 'AHU_MIN_OA_DPR': 0.20,\n", + " 'OAT_RAT_DELTA_MIN': 10.0,\n", " 'AIRFLOW_ERR_THRES': 0.3,\n", " 'AHU_MIN_OA_CFM_DESIGN': 2500,\n", " 'TROUBLESHOOT_MODE': False,\n",