diff --git a/docs/pestutilslib/docx_to_md.py b/docs/pestutilslib/docx_to_md.py
index 5944137..e8887b8 100644
--- a/docs/pestutilslib/docx_to_md.py
+++ b/docs/pestutilslib/docx_to_md.py
@@ -1,30 +1,30 @@
import sys
import os
+
def processFile(inFile, outFile):
- mdFile = open(inFile, 'r')
+ mdFile = open(inFile, "r")
toc = []
- levels = [0,0,0,0,0 ]
- newFile = open(outFile, 'w')
+ levels = [0, 0, 0, 0, 0]
+ newFile = open(outFile, "w")
tempFile = []
tocLoc = 0
partOfToc = False
-
+
for line in mdFile:
- if partOfToc and line != '\n':
+ if partOfToc and line != "\n":
continue
else:
partOfToc = False
- if 'Table of Contents' in line:
+ if "Table of Contents" in line:
tocLoc = len(tempFile) + 1
partOfToc = True
line += "\n"
- elif line[0] == '#':
+ elif line[0] == "#":
secId = buildToc(line, toc, levels)
- line = addSectionTag(cleanLine(line), secId) + '\n'
+ line = addSectionTag(cleanLine(line), secId) + "\n"
tempFile.append(line)
-
for line in toc:
tempFile.insert(tocLoc, line)
tocLoc += 1
@@ -36,58 +36,75 @@ def processFile(inFile, outFile):
mdFile.close()
newFile.close()
+
def addSectionTag(line, secId):
- startIndex = line.find(' ')
- line = line[:startIndex + 1] + '' + line[startIndex + 1:]
+ startIndex = line.find(" ")
+ line = line[: startIndex + 1] + "" + line[startIndex + 1 :]
return line
+
def buildToc(line, toc, levels):
line = cleanLine(line)
- secId = 's'
- if line[:4] == '####':
-
- #raise UserWarning('Header levels greater than 3 not supported')
+ secId = "s"
+ if line[:4] == "####":
+ # raise UserWarning('Header levels greater than 3 not supported')
levels[4] += 1
- secId += str(levels[1]) + '-' + str(levels[2]) + '-' + str(levels[3]) + '-' + str(levels[4])
- toc.append(' - [' + line[5:] + '](#' + secId + ')\n')
- elif line[:3] == '###':
+ secId += (
+ str(levels[1])
+ + "-"
+ + str(levels[2])
+ + "-"
+ + str(levels[3])
+ + "-"
+ + str(levels[4])
+ )
+ toc.append(" - [" + line[5:] + "](#" + secId + ")\n")
+ elif line[:3] == "###":
levels[3] += 1
- secId += str(levels[1]) + '-' + str(levels[2]) + '-' + str(levels[3])
- toc.append(' - [' + line[4:] + '](#' + secId + ')\n')
- elif line[:2] == '##':
+ secId += str(levels[1]) + "-" + str(levels[2]) + "-" + str(levels[3])
+ toc.append(" - [" + line[4:] + "](#" + secId + ")\n")
+ elif line[:2] == "##":
levels[2] += 1
levels[3] = 0
- secId += str(levels[1]) + '-' + str(levels[2])
- toc.append(' - [' + line[3:] + '](#' + secId + ')\n')
- elif line[:1] == '#':
+ secId += str(levels[1]) + "-" + str(levels[2])
+ toc.append(" - [" + line[3:] + "](#" + secId + ")\n")
+ elif line[:1] == "#":
levels[1] += 1
levels[3] = levels[2] = 0
secId += str(levels[1])
- toc.append('- [' + line[2:] + '](#' + secId + ')\n')
+ toc.append("- [" + line[2:] + "](#" + secId + ")\n")
return secId
+
def cleanLine(text):
text = stripNewline(text)
text = removeAnchors(text)
return text
+
def stripNewline(text):
- return text.replace('\n', '')
+ return text.replace("\n", "")
+
def removeAnchors(text):
- while ('<' in text and '>' in text):
- leftTag = text.index('<')
- rightTag = text.index('>')
- text = text[0:leftTag] + text[rightTag + 1:]
+ while "<" in text and ">" in text:
+ leftTag = text.index("<")
+ rightTag = text.index(">")
+ text = text[0:leftTag] + text[rightTag + 1 :]
return text
-def clean(docx_file,inFile,outFile,run_pandoc=True):
+
+def clean(docx_file, inFile, outFile, run_pandoc=True):
if run_pandoc:
- os.system("pandoc -t gfm --wrap=none --extract-media . -o file.md {0} --mathjax".format(docx_file))
- num_str = [str(i) for i in range(1,11)]
- lines = open(inFile,'r').readlines()
+ os.system(
+ "pandoc -t gfm --wrap=none --extract-media . -o file.md {0} --mathjax".format(
+ docx_file
+ )
+ )
+ num_str = [str(i) for i in range(1, 11)]
+ lines = open(inFile, "r").readlines()
- #notoc_lines = []
+ # notoc_lines = []
i = 0
# while i < len(lines):
# line = lines[i]
@@ -104,31 +121,40 @@ def clean(docx_file,inFile,outFile,run_pandoc=True):
# notoc_lines = []
i = 0
while i < len(lines):
- #if lines[i].strip().startswith("----"):
+ # if lines[i].strip().startswith("----"):
# lines[i-1] = "## " + lines[i-1]
# lines[i] = "\n"
if "" in lines[i]:
- lines[i] = lines[i].replace("
","").replace("
","
").replace("#","~")
+ lines[i] = (
+ lines[i].replace("", "").replace("
", "
").replace("#", "~")
+ )
if "blockquote" in lines[i]:
-            lines[i] = lines[i].replace("<blockquote>","").replace("\n","")
+            lines[i] = lines[i].replace("<blockquote>", "").replace("\n", "")
if lines[i].strip().startswith("$") and not "bmatrix" in lines[i].lower():
label = lines[i].split()[-1]
- eq_str = lines[i].replace("$$","$").split('$')[1]
- eq_str = r"{0}".format(eq_str).replace("\\\\","\\").replace(" \\ "," ").replace("\\_","_")
-            math_str_pre = "<img src={0}> {1}\n".format(eq_str,label)
+ eq_str = lines[i].replace("$$", "$").split("$")[1]
+ eq_str = (
+ r"{0}".format(eq_str)
+ .replace("\\\\", "\\")
+ .replace(" \\ ", " ")
+ .replace("\\_", "_")
+ )
+            math_str_pre = '<img src="{0}"> {1}\n'.format(eq_str, label)
lines[i] = math_str_pre + " " + math_str_post
-        if lines[i].strip().startswith("<img"): # and "pcf" in lines[i].lower():
-            lines[i] = "<div align=center>" + lines[i] + "</div>\n"
+        if lines[i].strip().startswith("<img"):  # and "pcf" in lines[i].lower():
+            lines[i] = '<div align="center">' + lines[i] + "</div>\n"
if "comment" in lines[i].lower():
- lines[i] = lines[i].replace("~","#")
+ lines[i] = lines[i].replace("~", "#")
elif lines[i].strip().startswith("[") and "]" in lines[i]:
- raw = lines[i].strip().split(']')
- rraw = " ".join(raw[0].split()[:-1])+"]"
+ raw = lines[i].strip().split("]")
+ rraw = " ".join(raw[0].split()[:-1]) + "]"
new_line = rraw + "".join(raw[1:]) + "\n"
- print(lines[i],new_line)
+ print(lines[i], new_line)
lines[i] = new_line
# elif "bmatrix" in lines[i].lower():
# eq_str = lines[i]
@@ -146,20 +172,18 @@ def clean(docx_file,inFile,outFile,run_pandoc=True):
i += 1
- #lines[0] = "# Table of Contents\n"
- with open(outFile,'w') as f:
-
+ # lines[0] = "# Table of Contents\n"
+ with open(outFile, "w") as f:
for line in lines:
- #if line.lower().strip().startswith("[introduction"):
+ # if line.lower().strip().startswith("[introduction"):
# f.write("\n# Table of Contents\n\n")
f.write(line)
if __name__ == "__main__":
-
- clean("new_function_documentation.docx","file.md","fortran_library_documentation.md",True)
-
-
-
-
-
+ clean(
+ "new_function_documentation.docx",
+ "file.md",
+ "fortran_library_documentation.md",
+ True,
+ )
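Review note on docx_to_md.py: processFile() still opens both files with bare open() and relies on the explicit mdFile.close()/newFile.close() calls at the end. A minimal sketch of the same read/transform/write flow using context managers (illustrative only, not part of this patch; the function and variable names here are hypothetical):

```python
# Hypothetical refactor sketch: context managers close both files even if a
# transform raises, making the explicit close() calls unnecessary.
def process_file(in_path: str, out_path: str) -> None:
    with open(in_path, "r") as md_file, open(out_path, "w") as new_file:
        for line in md_file:
            # header tagging / TOC collection from processFile() would go here
            new_file.write(line)
```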
diff --git a/examples/clear_output_all.py b/examples/clear_output_all.py
index 05843b5..169d7e8 100644
--- a/examples/clear_output_all.py
+++ b/examples/clear_output_all.py
@@ -1,10 +1,17 @@
import os
+
dirs = ["."]
notebook_count = 0
for d in dirs:
- nb_files = [os.path.join(d,f) for f in os.listdir(d) if f.lower().endswith(".ipynb")]
+ nb_files = [
+ os.path.join(d, f) for f in os.listdir(d) if f.lower().endswith(".ipynb")
+ ]
for nb_file in nb_files:
- print("clearing",nb_file)
- os.system("jupyter nbconvert --ClearOutputPreprocessor.enabled=True --ClearMetadataPreprocessor.enabled=True --inplace {0}".format(nb_file))
+ print("clearing", nb_file)
+ os.system(
+ "jupyter nbconvert --ClearOutputPreprocessor.enabled=True --ClearMetadataPreprocessor.enabled=True --inplace {0}".format(
+ nb_file
+ )
+ )
notebook_count += 1
-print(notebook_count," notebooks cleared")
+print(notebook_count, " notebooks cleared")
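Review note on clear_output_all.py: the os.system() call works, but an argument-list subprocess call sidesteps shell quoting for notebook names containing spaces. A hedged alternative (stdlib subprocess; the nbconvert flags are copied verbatim from the command above; the helper name is illustrative):

```python
# Sketch only: equivalent nbconvert invocation without going through a shell.
import subprocess


def clear_notebook(nb_file: str) -> None:
    # check=True raises CalledProcessError if nbconvert exits nonzero
    subprocess.run(
        [
            "jupyter",
            "nbconvert",
            "--ClearOutputPreprocessor.enabled=True",
            "--ClearMetadataPreprocessor.enabled=True",
            "--inplace",
            nb_file,
        ],
        check=True,
    )
```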
diff --git a/examples/exploring_lowlevel_pypestutils_functions.ipynb b/examples/exploring_lowlevel_pypestutils_functions.ipynb
index 7fd2e72..419374f 100644
--- a/examples/exploring_lowlevel_pypestutils_functions.ipynb
+++ b/examples/exploring_lowlevel_pypestutils_functions.ipynb
@@ -46,7 +46,7 @@
"w_d = \"freyberg_explore_lowlevel\"\n",
"if os.path.exists(w_d):\n",
" shutil.rmtree(w_d)\n",
- "shutil.copytree(org_d,w_d)"
+ "shutil.copytree(org_d, w_d)"
]
},
{
@@ -88,6 +88,7 @@
"outputs": [],
"source": [
"from pypestutils.pestutilslib import PestUtilsLib\n",
+ "\n",
"lib = PestUtilsLib()"
]
},
@@ -98,7 +99,9 @@
"metadata": {},
"outputs": [],
"source": [
- "grid_info = lib.install_mf6_grid_from_file(\"structgrid\",os.path.join(w_d,\"freyberg6.dis.grb\"))\n"
+ "grid_info = lib.install_mf6_grid_from_file(\n",
+ " \"structgrid\", os.path.join(w_d, \"freyberg6.dis.grb\")\n",
+ ")"
]
},
{
@@ -154,7 +157,7 @@
"metadata": {},
"outputs": [],
"source": [
- "easting,northing,elev = lib.get_cell_centres_mf6(\"structgrid\",grid_info[\"ncells\"])"
+ "easting, northing, elev = lib.get_cell_centres_mf6(\"structgrid\", grid_info[\"ncells\"])"
]
},
{
@@ -164,10 +167,10 @@
"metadata": {},
"outputs": [],
"source": [
- "easting = easting[:nrow*ncol]\n",
- "northing = northing[:nrow*ncol]\n",
- "Easting = easting.reshape((nrow,ncol))\n",
- "Northing = northing.reshape((nrow,ncol))"
+ "easting = easting[: nrow * ncol]\n",
+ "northing = northing[: nrow * ncol]\n",
+ "Easting = easting.reshape((nrow, ncol))\n",
+ "Northing = northing.reshape((nrow, ncol))"
]
},
{
@@ -189,7 +192,7 @@
"metadata": {},
"outputs": [],
"source": [
- "hds_file = [os.path.join(w_d,f) for f in os.listdir(w_d) if f.endswith(\".hds\")][0]\n",
+ "hds_file = [os.path.join(w_d, f) for f in os.listdir(w_d) if f.endswith(\".hds\")][0]\n",
"hds_file"
]
},
@@ -208,7 +211,9 @@
"metadata": {},
"outputs": [],
"source": [
- "depvar_file_info = lib.inquire_modflow_binary_file_specs(hds_file,hds_file+\".out\",31,1)"
+ "depvar_file_info = lib.inquire_modflow_binary_file_specs(\n",
+ " hds_file, hds_file + \".out\", 31, 1\n",
+ ")"
]
},
{
@@ -236,7 +241,7 @@
"metadata": {},
"outputs": [],
"source": [
- "df = pd.read_csv(hds_file+\".out\")\n",
+ "df = pd.read_csv(hds_file + \".out\")\n",
"df.columns = [c.lower() for c in df.columns]\n",
"df"
]
@@ -256,7 +261,9 @@
"metadata": {},
"outputs": [],
"source": [
- "hdsdf = pd.read_csv(os.path.join(\"freyberg_aux_files\",\"gwlevel_obs.csv\"),parse_dates=[\"datetime\"])\n",
+ "hdsdf = pd.read_csv(\n",
+ " os.path.join(\"freyberg_aux_files\", \"gwlevel_obs.csv\"), parse_dates=[\"datetime\"]\n",
+ ")\n",
"hdsdf"
]
},
@@ -268,7 +275,7 @@
"outputs": [],
"source": [
"mtop = m.dis.top.array\n",
- "mtop[mtop<-100] = np.nan"
+ "mtop[mtop < -100] = np.nan"
]
},
{
@@ -278,15 +285,15 @@
"metadata": {},
"outputs": [],
"source": [
- "fig,axes = plt.subplots(1,2,figsize=(10,10))\n",
- "for lay,ax in zip([1,3],axes):\n",
+ "fig, axes = plt.subplots(1, 2, figsize=(10, 10))\n",
+ "for lay, ax in zip([1, 3], axes):\n",
" ax.set_aspect(\"equal\")\n",
- " kdf = hdsdf.loc[hdsdf.layer==lay,:]\n",
+ " kdf = hdsdf.loc[hdsdf.layer == lay, :]\n",
" assert kdf.shape[0] > 0\n",
- " ax.pcolormesh(Easting,Northing,mtop)\n",
- " ax.scatter(kdf.x,kdf.y,marker=\"^\",c=\"k\",label=\"gw level loc\")\n",
+ " ax.pcolormesh(Easting, Northing, mtop)\n",
+ " ax.scatter(kdf.x, kdf.y, marker=\"^\", c=\"k\", label=\"gw level loc\")\n",
" ax.legend(loc=\"upper left\")\n",
- " ax.set_title(\"gw level locations in layer {0}\".format(lay),loc=\"left\")\n"
+ " ax.set_title(\"gw level locations in layer {0}\".format(lay), loc=\"left\")"
]
},
{
@@ -323,8 +330,8 @@
"metadata": {},
"outputs": [],
"source": [
- "fac_file = os.path.join(w_d,\"obs_interp_fac.bin\")\n",
- "bln_file = fac_file.replace(\".bin\",\".bln\")"
+ "fac_file = os.path.join(w_d, \"obs_interp_fac.bin\")\n",
+ "bln_file = fac_file.replace(\".bin\", \".bln\")"
]
},
{
@@ -334,7 +341,15 @@
"metadata": {},
"outputs": [],
"source": [
- "results = lib.calc_mf6_interp_factors(\"structgrid\",usitedf.x.values,usitedf.y.values,usitedf.layer.values,fac_file,\"binary\",bln_file)\n",
+ "results = lib.calc_mf6_interp_factors(\n",
+ " \"structgrid\",\n",
+ " usitedf.x.values,\n",
+ " usitedf.y.values,\n",
+ " usitedf.layer.values,\n",
+ " fac_file,\n",
+ " \"binary\",\n",
+ " bln_file,\n",
+ ")\n",
"results"
]
},
@@ -371,7 +386,17 @@
"metadata": {},
"outputs": [],
"source": [
- "head_results = lib.interp_from_mf6_depvar_file(hds_file,fac_file,\"binary\",depvar_file_info[\"ntime\"],\"head\",1e+10,True,-1.0e+30,usitedf.shape[0])"
+ "head_results = lib.interp_from_mf6_depvar_file(\n",
+ " hds_file,\n",
+ " fac_file,\n",
+ " \"binary\",\n",
+ " depvar_file_info[\"ntime\"],\n",
+ " \"head\",\n",
+ " 1e10,\n",
+ " True,\n",
+ " -1.0e30,\n",
+ " usitedf.shape[0],\n",
+ ")"
]
},
{
@@ -401,8 +426,10 @@
"metadata": {},
"outputs": [],
"source": [
- "start_datetime = pd.to_datetime(\"1-1-2018\") \n",
- "hdsdf.loc[:,\"time\"] = hdsdf.datetime.apply(lambda x: x - start_datetime).dt.days # we are losing fractional days..oh well...\n",
+ "start_datetime = pd.to_datetime(\"1-1-2018\")\n",
+ "hdsdf.loc[:, \"time\"] = hdsdf.datetime.apply(\n",
+ " lambda x: x - start_datetime\n",
+ ").dt.days # we are losing fractional days..oh well...\n",
"hdsdf.time"
]
},
@@ -423,10 +450,10 @@
"source": [
"usite = hdsdf.site.unique()\n",
"usite.sort()\n",
- "usite_dict = {s:c for s,c in zip(usite,np.arange(usite.shape[0],dtype=int))}\n",
- "hdsdf.loc[:,\"isite\"] = hdsdf.site.apply(lambda x: usite_dict[x])\n",
+ "usite_dict = {s: c for s, c in zip(usite, np.arange(usite.shape[0], dtype=int))}\n",
+ "hdsdf.loc[:, \"isite\"] = hdsdf.site.apply(lambda x: usite_dict[x])\n",
"hdsdf.isite\n",
- "hdsdf.sort_values(by=[\"isite\",\"time\"],inplace=True)\n",
+ "hdsdf.sort_values(by=[\"isite\", \"time\"], inplace=True)\n",
"hdsdf"
]
},
@@ -437,7 +464,17 @@
"metadata": {},
"outputs": [],
"source": [
- "ihead_results = lib.interp_to_obstime(head_results[\"nproctime\"],head_results[\"simtime\"],head_results[\"simstate\"],1.e+10,\"L\",35.0,1.0e+30,hdsdf.isite.values,hdsdf.time.values)"
+ "ihead_results = lib.interp_to_obstime(\n",
+ " head_results[\"nproctime\"],\n",
+ " head_results[\"simtime\"],\n",
+ " head_results[\"simstate\"],\n",
+ " 1.0e10,\n",
+ " \"L\",\n",
+ " 35.0,\n",
+ " 1.0e30,\n",
+ " hdsdf.isite.values,\n",
+ " hdsdf.time.values,\n",
+ ")"
]
},
{
@@ -447,7 +484,7 @@
"metadata": {},
"outputs": [],
"source": [
- "hdsdf.loc[:,\"simulated\"] = ihead_results"
+ "hdsdf.loc[:, \"simulated\"] = ihead_results"
]
},
{
@@ -485,7 +522,7 @@
"metadata": {},
"outputs": [],
"source": [
- "easting,northing,elev = lib.get_cell_centres_mf6(\"structgrid\",grid_info[\"ncells\"])"
+ "easting, northing, elev = lib.get_cell_centres_mf6(\"structgrid\", grid_info[\"ncells\"])"
]
},
{
@@ -495,7 +532,7 @@
"metadata": {},
"outputs": [],
"source": [
- "easting.shape,northing.shape"
+ "easting.shape, northing.shape"
]
},
{
@@ -513,8 +550,8 @@
"metadata": {},
"outputs": [],
"source": [
- "easting = easting[:nrow*ncol]\n",
- "northing = northing[:nrow*ncol]"
+ "easting = easting[: nrow * ncol]\n",
+ "northing = northing[: nrow * ncol]"
]
},
{
@@ -536,18 +573,18 @@
"source": [
"# cell area\n",
"area = np.ones_like(easting)\n",
- "#active array\n",
- "active = m.dis.idomain.array[0,:,:].flatten()\n",
+ "# active array\n",
+ "active = m.dis.idomain.array[0, :, :].flatten()\n",
"# property mean\n",
"mean = np.ones_like(easting)\n",
"# property variance\n",
"var = np.ones_like(easting)\n",
"# the variogram range\n",
- "aa = np.ones_like(easting)*1000\n",
+ "aa = np.ones_like(easting) * 1000\n",
"# anisotropy\n",
- "anis = np.ones_like(easting)*5\n",
+ "anis = np.ones_like(easting) * 5\n",
"# bearing\n",
- "bearing = (np.ones_like(easting) * 55)"
+ "bearing = np.ones_like(easting) * 55"
]
},
{
@@ -587,9 +624,23 @@
"source": [
"transform = \"none\"\n",
"variogram_type = \"exp\"\n",
- "power = 1.0 #unused\n",
+ "power = 1.0 # unused\n",
"num_reals = 10\n",
- "reals = lib.fieldgen2d_sva(easting,northing,area,active,mean,var,aa,anis,bearing,transform,variogram_type,power,num_reals)\n",
+ "reals = lib.fieldgen2d_sva(\n",
+ " easting,\n",
+ " northing,\n",
+ " area,\n",
+ " active,\n",
+ " mean,\n",
+ " var,\n",
+ " aa,\n",
+ " anis,\n",
+ " bearing,\n",
+ " transform,\n",
+ " variogram_type,\n",
+ " power,\n",
+ " num_reals,\n",
+ ")\n",
"reals.shape"
]
},
@@ -600,7 +651,7 @@
"metadata": {},
"outputs": [],
"source": [
- "m.dis.top = reals[:,0]\n",
+ "m.dis.top = reals[:, 0]\n",
"m.dis.top.plot()"
]
},
@@ -622,12 +673,26 @@
"lib2 = PestUtilsLib()\n",
"transform = \"none\"\n",
"variogram_type = \"exp\"\n",
- "power = 1.0 #unused\n",
+ "power = 1.0 # unused\n",
"num_reals = 10\n",
"lib2.initialize_randgen(54321)\n",
- "reals = lib2.fieldgen2d_sva(easting,northing,area,active,mean,var,aa,anis,bearing,transform,variogram_type,power,num_reals)\n",
+ "reals = lib2.fieldgen2d_sva(\n",
+ " easting,\n",
+ " northing,\n",
+ " area,\n",
+ " active,\n",
+ " mean,\n",
+ " var,\n",
+ " aa,\n",
+ " anis,\n",
+ " bearing,\n",
+ " transform,\n",
+ " variogram_type,\n",
+ " power,\n",
+ " num_reals,\n",
+ ")\n",
"print(reals)\n",
- "plt.imshow(reals[:,0].reshape((nrow,ncol)))"
+ "plt.imshow(reals[:, 0].reshape((nrow, ncol)))"
]
},
{
@@ -645,7 +710,7 @@
"metadata": {},
"outputs": [],
"source": [
- "bearing = np.add(np.ones((nrow,ncol)),np.atleast_2d(np.arange(ncol)))"
+ "bearing = np.add(np.ones((nrow, ncol)), np.atleast_2d(np.arange(ncol)))"
]
},
{
@@ -687,8 +752,22 @@
"metadata": {},
"outputs": [],
"source": [
- "reals = lib.fieldgen2d_sva(easting,northing,area,active,mean,var,aa,anis,bearing,transform,variogram_type,power,num_reals)\n",
- "r = reals[:,0].reshape((nrow,ncol))\n",
+ "reals = lib.fieldgen2d_sva(\n",
+ " easting,\n",
+ " northing,\n",
+ " area,\n",
+ " active,\n",
+ " mean,\n",
+ " var,\n",
+ " aa,\n",
+ " anis,\n",
+ " bearing,\n",
+ " transform,\n",
+ " variogram_type,\n",
+ " power,\n",
+ " num_reals,\n",
+ ")\n",
+ "r = reals[:, 0].reshape((nrow, ncol))\n",
"plt.imshow(r)"
]
},
@@ -707,24 +786,24 @@
"metadata": {},
"outputs": [],
"source": [
- "Easting = easting.reshape((nrow,ncol))\n",
- "Northing = northing.reshape((nrow,ncol))\n",
- "ppeasting,ppnorthing = [],[]\n",
+ "Easting = easting.reshape((nrow, ncol))\n",
+ "Northing = northing.reshape((nrow, ncol))\n",
+ "ppeasting, ppnorthing = [], []\n",
"ppval = []\n",
"pp_space = 20\n",
- "ib = m.dis.idomain.array[0,:,:]\n",
- "half_pp_space = int(pp_space/2)\n",
- "for i in range(half_pp_space,nrow,pp_space):\n",
- " for j in range(half_pp_space,ncol,pp_space):\n",
- " if ib[i,j] == 0:\n",
+ "ib = m.dis.idomain.array[0, :, :]\n",
+ "half_pp_space = int(pp_space / 2)\n",
+ "for i in range(half_pp_space, nrow, pp_space):\n",
+ " for j in range(half_pp_space, ncol, pp_space):\n",
+ " if ib[i, j] == 0:\n",
" continue\n",
- " ppeasting.append(Easting[i,j])\n",
- " ppnorthing.append(Northing[i,j])\n",
- " ppval.append(r[i,j])\n",
+ " ppeasting.append(Easting[i, j])\n",
+ " ppnorthing.append(Northing[i, j])\n",
+ " ppval.append(r[i, j])\n",
"ppeasting = np.asarray(ppeasting)\n",
"ppnorthing = np.asarray(ppnorthing)\n",
"ppval = np.asarray(ppval)\n",
- "ppeasting.shape,ppnorthing.shape,ppval.shape"
+ "ppeasting.shape, ppnorthing.shape, ppval.shape"
]
},
{
@@ -742,12 +821,12 @@
"metadata": {},
"outputs": [],
"source": [
- "fig,ax = plt.subplots(1,1)\n",
+ "fig, ax = plt.subplots(1, 1)\n",
"ax.set_aspect(\"equal\")\n",
- "ax.pcolormesh(Easting,Northing,r)\n",
+ "ax.pcolormesh(Easting, Northing, r)\n",
"ax.set_title(\"realization\")\n",
"\n",
- "ax.scatter(ppeasting,ppnorthing,marker=\".\",s=50,c='k')"
+ "ax.scatter(ppeasting, ppnorthing, marker=\".\", s=50, c=\"k\")"
]
},
{
@@ -767,15 +846,32 @@
"source": [
"max_pts = 50\n",
"min_pts = 1\n",
- "search_dist = 1.e+10\n",
- "aa_pp = aa * pp_space *10 #?\n",
- "zone_pp = np.ones_like(ppeasting,dtype=int)\n",
- "fac_file = os.path.join(w_d,\"factors.bin\")\n",
+ "search_dist = 1.0e10\n",
+ "aa_pp = aa * pp_space * 10 # ?\n",
+ "zone_pp = np.ones_like(ppeasting, dtype=int)\n",
+ "fac_file = os.path.join(w_d, \"factors.bin\")\n",
"from datetime import datetime\n",
+ "\n",
"s = datetime.now()\n",
- "ipts = lib.calc_kriging_factors_2d(ppeasting,ppnorthing,zone_pp,easting,northing,ib.flatten(),\n",
- " \"exp\",\"ordinary\",aa_pp,anis,bearing,search_dist,max_pts,min_pts,fac_file,\"binary\")\n",
- "\"total points:\",ipts,\" took:\",(datetime.now() - s).total_seconds()"
+ "ipts = lib.calc_kriging_factors_2d(\n",
+ " ppeasting,\n",
+ " ppnorthing,\n",
+ " zone_pp,\n",
+ " easting,\n",
+ " northing,\n",
+ " ib.flatten(),\n",
+ " \"exp\",\n",
+ " \"ordinary\",\n",
+ " aa_pp,\n",
+ " anis,\n",
+ " bearing,\n",
+ " search_dist,\n",
+ " max_pts,\n",
+ " min_pts,\n",
+ " fac_file,\n",
+ " \"binary\",\n",
+ ")\n",
+ "\"total points:\", ipts, \" took:\", (datetime.now() - s).total_seconds()"
]
},
{
@@ -793,7 +889,16 @@
"metadata": {},
"outputs": [],
"source": [
- "result = lib.krige_using_file(fac_file,\"binary\",len(easting),\"ordinary\",\"none\",ppval,np.zeros_like(easting),0)"
+ "result = lib.krige_using_file(\n",
+ " fac_file,\n",
+ " \"binary\",\n",
+ " len(easting),\n",
+ " \"ordinary\",\n",
+ " \"none\",\n",
+ " ppval,\n",
+ " np.zeros_like(easting),\n",
+ " 0,\n",
+ ")"
]
},
{
@@ -803,19 +908,19 @@
"metadata": {},
"outputs": [],
"source": [
- "rr = result[\"targval\"].reshape(nrow,ncol)\n",
- "fig,axes = plt.subplots(1,2)\n",
+ "rr = result[\"targval\"].reshape(nrow, ncol)\n",
+ "fig, axes = plt.subplots(1, 2)\n",
"ax = axes[0]\n",
"ax.set_aspect(\"equal\")\n",
"ax.set_title(\"pp interpolated array\")\n",
- "ax.pcolormesh(Easting,Northing,rr) #the interpolated array\n",
+ "ax.pcolormesh(Easting, Northing, rr) # the interpolated array\n",
"ax = axes[1]\n",
"ax.set_aspect(\"equal\")\n",
"ax.set_title(\"pp locs with sampled values\")\n",
- "id_mask = m.dis.idomain.array[0,:,:].copy().astype(float)\n",
- "id_mask[id_mask!=0] = np.nan\n",
- "ax.pcolormesh(Easting,Northing,id_mask)\n",
- "ax.scatter(ppeasting,ppnorthing,marker=\".\",s=50,c=ppval)\n"
+ "id_mask = m.dis.idomain.array[0, :, :].copy().astype(float)\n",
+ "id_mask[id_mask != 0] = np.nan\n",
+ "ax.pcolormesh(Easting, Northing, id_mask)\n",
+ "ax.scatter(ppeasting, ppnorthing, marker=\".\", s=50, c=ppval)"
]
},
{
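Review note on exploring_lowlevel_pypestutils_functions.ipynb: the cells above call lib.fieldgen2d_sva() twice with the same seed to show the draws are reproducible. A self-contained recap on a toy grid (pypestutils must be installed; the argument order is exactly the one used in the notebook, the toy arrays are assumptions):

```python
# Toy 10x10 grid so the call stands alone; the notebook passes the MODFLOW-6
# cell centres instead.
import numpy as np
from pypestutils.pestutilslib import PestUtilsLib

n = 100
easting = np.tile(np.arange(10.0), 10)
northing = np.repeat(np.arange(10.0), 10)
area = np.ones(n)                # cell area
active = np.ones(n, dtype=int)   # active flags (idomain-style)
mean = np.ones(n)                # property mean
var = np.ones(n)                 # property variance
aa = np.ones(n) * 3.0            # variogram range
anis = np.ones(n)                # anisotropy
bearing = np.ones(n) * 55.0      # bearing

lib = PestUtilsLib()
lib.initialize_randgen(54321)  # same seed -> same realizations
reals = lib.fieldgen2d_sva(
    easting, northing, area, active, mean, var, aa, anis, bearing,
    "none",  # transform
    "exp",   # variogram type
    1.0,     # power (unused for "exp")
    10,      # num_reals
)
print(reals.shape)  # one column per realization, per the notebook's reshape
```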
diff --git a/examples/exploring_pypestutils_helpers.ipynb b/examples/exploring_pypestutils_helpers.ipynb
index 5b06963..0fa79dc 100644
--- a/examples/exploring_pypestutils_helpers.ipynb
+++ b/examples/exploring_pypestutils_helpers.ipynb
@@ -44,7 +44,7 @@
"w_d = \"freyberg_highlevel_helpers\"\n",
"if os.path.exists(w_d):\n",
" shutil.rmtree(w_d)\n",
- "shutil.copytree(org_d,w_d)"
+ "shutil.copytree(org_d, w_d)"
]
},
{
@@ -62,8 +62,8 @@
"metadata": {},
"outputs": [],
"source": [
- "delc = np.loadtxt(os.path.join(w_d,\"freyberg6.dis_delc.txt\")).flatten()\n",
- "delr = np.loadtxt(os.path.join(w_d,\"freyberg6.dis_delr.txt\")).flatten()"
+ "delc = np.loadtxt(os.path.join(w_d, \"freyberg6.dis_delc.txt\")).flatten()\n",
+ "delr = np.loadtxt(os.path.join(w_d, \"freyberg6.dis_delr.txt\")).flatten()"
]
},
{
@@ -76,8 +76,8 @@
"nrow = delc.shape[0]\n",
"ncol = delr.shape[0]\n",
"nlay = 3\n",
- "ib = np.loadtxt(os.path.join(w_d,\"freyberg6.dis_idomain_layer1.txt\"),dtype=int)\n",
- "ib = ib.flatten().reshape(nrow,ncol)\n",
+ "ib = np.loadtxt(os.path.join(w_d, \"freyberg6.dis_idomain_layer1.txt\"), dtype=int)\n",
+ "ib = ib.flatten().reshape(nrow, ncol)\n",
"plt.imshow(ib)"
]
},
@@ -108,7 +108,7 @@
"metadata": {},
"outputs": [],
"source": [
- "grb_fname = os.path.join(w_d,\"freyberg6.dis.grb\")\n",
+ "grb_fname = os.path.join(w_d, \"freyberg6.dis.grb\")\n",
"os.path.exists(grb_fname)"
]
},
@@ -127,7 +127,7 @@
"metadata": {},
"outputs": [],
"source": [
- "csv_fname = os.path.join(\"freyberg_aux_files\",\"gwlevel_obs.csv\")\n",
+ "csv_fname = os.path.join(\"freyberg_aux_files\", \"gwlevel_obs.csv\")\n",
"assert os.path.exists(csv_fname)\n",
"obsdf = pd.read_csv(csv_fname)\n",
"obsdf"
@@ -148,11 +148,11 @@
"metadata": {},
"outputs": [],
"source": [
- "depvar_fname = os.path.join(w_d,\"freyberg6_freyberg.hds\")\n",
- "model_type = 31 #structured modflow-6 model type\n",
- "start_datetime = \"1-1-2018\" # when the simulation starts\n",
- "depvar_ftype = 1# modflow-6 binary file format: 1 for states, 2 for budgets\n",
- "depvar_name = \"head\"# the variable name in the output file to process/extract"
+ "depvar_fname = os.path.join(w_d, \"freyberg6_freyberg.hds\")\n",
+ "model_type = 31 # structured modflow-6 model type\n",
+ "start_datetime = \"1-1-2018\" # when the simulation starts\n",
+ "depvar_ftype = 1 # modflow-6 binary file format: 1 for states, 2 for budgets\n",
+ "depvar_name = \"head\" # the variable name in the output file to process/extract"
]
},
{
@@ -162,7 +162,9 @@
"metadata": {},
"outputs": [],
"source": [
- "results = helpers.mod2obs_mf6(grb_fname,depvar_fname,csv_fname,model_type,start_datetime,depvar_ftype)"
+ "results = helpers.mod2obs_mf6(\n",
+ " grb_fname, depvar_fname, csv_fname, model_type, start_datetime, depvar_ftype\n",
+ ")"
]
},
{
@@ -185,22 +187,31 @@
"adf = results[\"all_results\"]\n",
"idf = results[\"interpolated_results\"]\n",
"for site in adf.columns:\n",
- " aadf = adf.loc[:,site]\n",
- " aadf.loc[aadf.values >1e29] = np.nan\n",
- " \n",
- " iidf = idf.loc[idf.site==site,:]\n",
- " iidf.loc[iidf.simulated>1e29,\"simulated\"] = np.nan\n",
- " fig,ax = plt.subplots(1,1,figsize=(8,5))\n",
- " ax.plot(aadf.index,aadf.values,\"0.5\",lw=0.5,marker=\"o\",mfc='none',label=\"all sim times\")\n",
- " ax.scatter(iidf.datetime,iidf.obsval,marker=\"^\",c=\"r\",label=\"observed\")\n",
- " ax.scatter(iidf.datetime,iidf.simulated,marker=\"^\",c=\"0.5\",label=\"interp to obs\")\n",
- " ax.set_title(site,loc=\"left\")\n",
+ " aadf = adf.loc[:, site]\n",
+ " aadf.loc[aadf.values > 1e29] = np.nan\n",
+ "\n",
+ " iidf = idf.loc[idf.site == site, :]\n",
+ " iidf.loc[iidf.simulated > 1e29, \"simulated\"] = np.nan\n",
+ " fig, ax = plt.subplots(1, 1, figsize=(8, 5))\n",
+ " ax.plot(\n",
+ " aadf.index,\n",
+ " aadf.values,\n",
+ " \"0.5\",\n",
+ " lw=0.5,\n",
+ " marker=\"o\",\n",
+ " mfc=\"none\",\n",
+ " label=\"all sim times\",\n",
+ " )\n",
+ " ax.scatter(iidf.datetime, iidf.obsval, marker=\"^\", c=\"r\", label=\"observed\")\n",
+ " ax.scatter(\n",
+ " iidf.datetime, iidf.simulated, marker=\"^\", c=\"0.5\", label=\"interp to obs\"\n",
+ " )\n",
+ " ax.set_title(site, loc=\"left\")\n",
" ax.legend(loc=\"upper left\")\n",
" ax.grid()\n",
" plt.tight_layout()\n",
" plt.show()\n",
- " plt.close(fig)\n",
- " "
+ " plt.close(fig)"
]
},
{
@@ -218,7 +229,11 @@
"metadata": {},
"outputs": [],
"source": [
- "[f for f in os.listdir(w_d) if f.endswith(\".csv\") and os.path.split(depvar_fname)[1] in f]"
+ "[\n",
+ " f\n",
+ " for f in os.listdir(w_d)\n",
+ " if f.endswith(\".csv\") and os.path.split(depvar_fname)[1] in f\n",
+ "]"
]
},
{
@@ -252,7 +267,7 @@
"metadata": {},
"outputs": [],
"source": [
- "sr = helpers.SpatialReference(delc=delc,delr=delr,rotation=-55,xul=0,yul=0)\n",
+ "sr = helpers.SpatialReference(delc=delc, delr=delr, rotation=-55, xul=0, yul=0)\n",
"sr.rotation"
]
},
@@ -263,7 +278,7 @@
"metadata": {},
"outputs": [],
"source": [
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,ib)"
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, ib)"
]
},
{
@@ -281,7 +296,7 @@
"metadata": {},
"outputs": [],
"source": [
- "gridspec_fname = os.path.join(w_d,\"grid.spc\")\n",
+ "gridspec_fname = os.path.join(w_d, \"grid.spc\")\n",
"sr.write_gridspec(gridspec_fname)"
]
},
@@ -300,9 +315,9 @@
"metadata": {},
"outputs": [],
"source": [
- "x,y = sr.xcentergrid.flatten(),sr.ycentergrid.flatten()\n",
- "grid_df = pd.DataFrame({\"x\":x,\"y\":y,\"layer\":1})\n",
- "csv_fname = os.path.join(w_d,\"grid.csv\")\n",
+ "x, y = sr.xcentergrid.flatten(), sr.ycentergrid.flatten()\n",
+ "grid_df = pd.DataFrame({\"x\": x, \"y\": y, \"layer\": 1})\n",
+ "csv_fname = os.path.join(w_d, \"grid.csv\")\n",
"grid_df.to_csv(csv_fname)"
]
},
@@ -322,6 +337,7 @@
"outputs": [],
"source": [
"from pypestutils.pestutilslib import PestUtilsLib\n",
+ "\n",
"lib = PestUtilsLib()"
]
},
@@ -340,7 +356,7 @@
"metadata": {},
"outputs": [],
"source": [
- "grb_fname = os.path.join(w_d,\"freyberg6.dis.grb\")\n",
+ "grb_fname = os.path.join(w_d, \"freyberg6.dis.grb\")\n",
"os.path.exists(grb_fname)"
]
},
@@ -352,7 +368,7 @@
"outputs": [],
"source": [
"grid_info = helpers.get_grid_info_from_mf6_grb(grb_fname)\n",
- "grid_info['x'].shape"
+ "grid_info[\"x\"].shape"
]
},
{
@@ -362,8 +378,8 @@
"metadata": {},
"outputs": [],
"source": [
- "xx = grid_info['x'].reshape((nlay,nrow,ncol))\n",
- "yy = grid_info['y'].reshape((nlay,nrow,ncol))"
+ "xx = grid_info[\"x\"].reshape((nlay, nrow, ncol))\n",
+ "yy = grid_info[\"y\"].reshape((nlay, nrow, ncol))"
]
},
{
@@ -373,7 +389,7 @@
"metadata": {},
"outputs": [],
"source": [
- "xx.shape\n"
+ "xx.shape"
]
},
{
@@ -383,9 +399,9 @@
"metadata": {},
"outputs": [],
"source": [
- "fig,ax = plt.subplots(1,1)\n",
+ "fig, ax = plt.subplots(1, 1)\n",
"ax.set_aspect(\"equal\")\n",
- "ax.pcolormesh(xx[0,:,:],yy[0,:,:],ib)"
+ "ax.pcolormesh(xx[0, :, :], yy[0, :, :], ib)"
]
},
{
@@ -404,7 +420,7 @@
"outputs": [],
"source": [
"grid_info = helpers.get_2d_grid_info_from_mf6_grb(grb_fname)\n",
- "grid_info['x'].shape"
+ "grid_info[\"x\"].shape"
]
},
{
@@ -503,9 +519,11 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf = helpers.get_2d_pp_info_structured_grid(pp_space=10,gridinfo_fname=gridspec_fname)\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,ib)\n",
- "plt.scatter(ppdf.x,ppdf.y)"
+ "ppdf = helpers.get_2d_pp_info_structured_grid(\n",
+ " pp_space=10, gridinfo_fname=gridspec_fname\n",
+ ")\n",
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, ib)\n",
+ "plt.scatter(ppdf.x, ppdf.y)"
]
},
{
@@ -515,9 +533,11 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf = helpers.get_2d_pp_info_structured_grid(pp_space=10,gridinfo_fname=grb_fname)\n",
- "plt.pcolormesh(grid_info_2d['x'].reshape((nrow,ncol)),grid_info_2d['y'].reshape((nrow,ncol)),ib)\n",
- "plt.scatter(ppdf.x,ppdf.y)"
+ "ppdf = helpers.get_2d_pp_info_structured_grid(pp_space=10, gridinfo_fname=grb_fname)\n",
+ "plt.pcolormesh(\n",
+ " grid_info_2d[\"x\"].reshape((nrow, ncol)), grid_info_2d[\"y\"].reshape((nrow, ncol)), ib\n",
+ ")\n",
+ "plt.scatter(ppdf.x, ppdf.y)"
]
},
{
@@ -527,9 +547,11 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf = helpers.get_2d_pp_info_structured_grid(10,gridspec_fname,array_dict={\"zone\":ib})\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,ib)\n",
- "plt.scatter(ppdf.x,ppdf.y)"
+ "ppdf = helpers.get_2d_pp_info_structured_grid(\n",
+ " 10, gridspec_fname, array_dict={\"zone\": ib}\n",
+ ")\n",
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, ib)\n",
+ "plt.scatter(ppdf.x, ppdf.y)"
]
},
{
@@ -549,8 +571,8 @@
"metadata": {},
"outputs": [],
"source": [
- "reals = helpers.generate_2d_grid_realizations(gridspec_fname,num_reals=10)\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,reals[0,:,:])"
+ "reals = helpers.generate_2d_grid_realizations(gridspec_fname, num_reals=10)\n",
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, reals[0, :, :])"
]
},
{
@@ -560,8 +582,8 @@
"metadata": {},
"outputs": [],
"source": [
- "reals = helpers.generate_2d_grid_realizations(grb_fname,num_reals=10)\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,reals[0,:,:])"
+ "reals = helpers.generate_2d_grid_realizations(grb_fname, num_reals=10)\n",
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, reals[0, :, :])"
]
},
{
@@ -579,8 +601,15 @@
"metadata": {},
"outputs": [],
"source": [
- "reals = helpers.generate_2d_grid_realizations(grid_df,num_reals=10)\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,reals[0,:,].reshape((nrow,ncol)))"
+ "reals = helpers.generate_2d_grid_realizations(grid_df, num_reals=10)\n",
+ "plt.pcolormesh(\n",
+ " sr.xcentergrid,\n",
+ " sr.ycentergrid,\n",
+ " reals[\n",
+ " 0,\n",
+ " :,\n",
+ " ].reshape((nrow, ncol)),\n",
+ ")"
]
},
{
@@ -598,8 +627,8 @@
"metadata": {},
"outputs": [],
"source": [
- "reals = helpers.generate_2d_grid_realizations(grb_fname,num_reals=10,zone_array=ib)\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,reals[0,:,:])"
+ "reals = helpers.generate_2d_grid_realizations(grb_fname, num_reals=10, zone_array=ib)\n",
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, reals[0, :, :])"
]
},
{
@@ -617,9 +646,9 @@
"metadata": {},
"outputs": [],
"source": [
- "bearing = np.add(np.ones((nrow,ncol)),np.atleast_2d(np.arange(ncol)))\n",
+ "bearing = np.add(np.ones((nrow, ncol)), np.atleast_2d(np.arange(ncol)))\n",
"plt.imshow(bearing)\n",
- "#bearing.min(),bearing.max()"
+ "# bearing.min(),bearing.max()"
]
},
{
@@ -629,8 +658,15 @@
"metadata": {},
"outputs": [],
"source": [
- "reals = helpers.generate_2d_grid_realizations(gridspec_fname,num_reals=10,zone_array=ib,variobearing=bearing,varioaniso=10,variorange=1000)\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,reals[0,:,:])"
+ "reals = helpers.generate_2d_grid_realizations(\n",
+ " gridspec_fname,\n",
+ " num_reals=10,\n",
+ " zone_array=ib,\n",
+ " variobearing=bearing,\n",
+ " varioaniso=10,\n",
+ " variorange=1000,\n",
+ ")\n",
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, reals[0, :, :])"
]
},
{
@@ -648,11 +684,11 @@
"metadata": {},
"outputs": [],
"source": [
- "s = 10**(np.sin(np.linspace(0,np.pi*2,nrow)))\n",
- "#plt.plot(s) \n",
- "aniso = np.add(np.ones((nrow,ncol)),np.atleast_2d(s).transpose())\n",
+ "s = 10 ** (np.sin(np.linspace(0, np.pi * 2, nrow)))\n",
+ "# plt.plot(s)\n",
+ "aniso = np.add(np.ones((nrow, ncol)), np.atleast_2d(s).transpose())\n",
"plt.imshow(aniso)\n",
- "#aniso.min(),aniso.max()"
+ "# aniso.min(),aniso.max()"
]
},
{
@@ -662,8 +698,15 @@
"metadata": {},
"outputs": [],
"source": [
- "reals = helpers.generate_2d_grid_realizations(gridspec_fname,num_reals=10,zone_array=ib,variobearing=bearing,varioaniso=aniso,variorange=1000)\n",
- "plt.pcolormesh(sr.xcentergrid,sr.ycentergrid,reals[0,:,:])"
+ "reals = helpers.generate_2d_grid_realizations(\n",
+ " gridspec_fname,\n",
+ " num_reals=10,\n",
+ " zone_array=ib,\n",
+ " variobearing=bearing,\n",
+ " varioaniso=aniso,\n",
+ " variorange=1000,\n",
+ ")\n",
+ "plt.pcolormesh(sr.xcentergrid, sr.ycentergrid, reals[0, :, :])"
]
},
{
@@ -681,14 +724,14 @@
"metadata": {},
"outputs": [],
"source": [
- "array_dict={\"zone\":ib,\"value\":reals[0,:,:],\"bearing\":bearing,\"aniso\":aniso}\n",
- "ppdf = helpers.get_2d_pp_info_structured_grid(10,gridspec_fname,array_dict=array_dict)\n",
- "fig,axes = plt.subplots(1,2,figsize=(10,10))\n",
- "axes[0].pcolormesh(sr.xcentergrid,sr.ycentergrid,reals[0])\n",
- "axes[0].scatter(ppdf.x,ppdf.y,c=\"k\")\n",
- "axes[1].scatter(ppdf.x,ppdf.y,c=ppdf.value)\n",
+ "array_dict = {\"zone\": ib, \"value\": reals[0, :, :], \"bearing\": bearing, \"aniso\": aniso}\n",
+ "ppdf = helpers.get_2d_pp_info_structured_grid(10, gridspec_fname, array_dict=array_dict)\n",
+ "fig, axes = plt.subplots(1, 2, figsize=(10, 10))\n",
+ "axes[0].pcolormesh(sr.xcentergrid, sr.ycentergrid, reals[0])\n",
+ "axes[0].scatter(ppdf.x, ppdf.y, c=\"k\")\n",
+ "axes[1].scatter(ppdf.x, ppdf.y, c=ppdf.value)\n",
"for ax in axes:\n",
- " ax.set_aspect(\"equal\")\n"
+ " ax.set_aspect(\"equal\")"
]
},
{
@@ -726,13 +769,15 @@
"metadata": {},
"outputs": [],
"source": [
- "interp_results = helpers.interpolate_with_sva_pilotpoints_2d(ppdf,grid_df,zone_array=ib)\n",
- "for tag,arr in interp_results.items():\n",
- " fig,ax = plt.subplots(1,1)\n",
- " \n",
- " cb = ax.pcolormesh(sr.xcentergrid,sr.ycentergrid,arr.reshape((nrow,ncol)))\n",
- " plt.colorbar(cb,ax=ax)\n",
- " ax.set_title(tag,loc=\"left\")"
+ "interp_results = helpers.interpolate_with_sva_pilotpoints_2d(\n",
+ " ppdf, grid_df, zone_array=ib\n",
+ ")\n",
+ "for tag, arr in interp_results.items():\n",
+ " fig, ax = plt.subplots(1, 1)\n",
+ "\n",
+ " cb = ax.pcolormesh(sr.xcentergrid, sr.ycentergrid, arr.reshape((nrow, ncol)))\n",
+ " plt.colorbar(cb, ax=ax)\n",
+ " ax.set_title(tag, loc=\"left\")"
]
},
{
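Review note on exploring_pypestutils_helpers.ipynb: the whole mod2obs workflow above reduces to one helper call. A recap using the notebook's own file names (assumes the freyberg files copied at the top of the notebook exist locally):

```python
import os

from pypestutils import helpers

w_d = "freyberg_highlevel_helpers"
results = helpers.mod2obs_mf6(
    os.path.join(w_d, "freyberg6.dis.grb"),                 # binary grid file
    os.path.join(w_d, "freyberg6_freyberg.hds"),            # head save file
    os.path.join("freyberg_aux_files", "gwlevel_obs.csv"),  # obs sites/times
    31,          # model_type: structured MODFLOW-6
    "1-1-2018",  # simulation start datetime
    1,           # depvar_ftype: 1 = states, 2 = budgets
)
adf = results["all_results"]           # every site at every sim output time
idf = results["interpolated_results"]  # interpolated to the obs datetimes
```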
diff --git a/examples/ppu_helpers.py b/examples/ppu_helpers.py
index a839540..9e4795c 100644
--- a/examples/ppu_helpers.py
+++ b/examples/ppu_helpers.py
@@ -1,33 +1,36 @@
import os
import pyemu
+
def setup_pps(d):
cwd = os.getcwd()
os.chdir(d)
apply_pps()
os.chdir(cwd)
+
def apply_pps():
-
from pypestutils import helpers
import numpy as np
import pandas as pd
+
df = pd.read_csv("pp_info.csv")
gridspec_fname = "org.grb"
- for model_file,pp_file in zip(df.model_file,df.pp_file):
+ for model_file, pp_file in zip(df.model_file, df.pp_file):
ppdf = pd.read_csv(pp_file)
- results = helpers.interpolate_with_sva_pilotpoints_2d(ppdf,gridspec_fname,vartransform="log")
+ results = helpers.interpolate_with_sva_pilotpoints_2d(
+ ppdf, gridspec_fname, vartransform="log"
+ )
org_arr = np.loadtxt(model_file)
interp = results["result"]
interp = interp.reshape(org_arr.shape)
new_arr = org_arr * interp
new_arr = new_arr
- new_arr[new_arr<1.0e-10] = 1.0e-10
- np.savetxt(model_file,new_arr,fmt="%15.6E")
- np.savetxt("interp_"+model_file,interp,fmt="%15.6E")
- np.savetxt("log_"+model_file,np.log10(new_arr),fmt="%15.6E")
-
+ new_arr[new_arr < 1.0e-10] = 1.0e-10
+ np.savetxt(model_file, new_arr, fmt="%15.6E")
+ np.savetxt("interp_" + model_file, interp, fmt="%15.6E")
+ np.savetxt("log_" + model_file, np.log10(new_arr), fmt="%15.6E")
+
if __name__ == "__main__":
- setup_pps()
-
\ No newline at end of file
+    setup_pps(".")  # setup_pps() requires a directory argument; "." assumed here
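Review note on ppu_helpers.py: setup_pps(d) chdirs into d and runs apply_pps(), which expects pp_info.csv, org.grb, the pilot-point csv files and the model array files listed in pp_info.csv to already exist there (the quadtree notebook calls ppu_helpers.setup_pps(pf.new_d) after writing them). The bare setup_pps() call in __main__ would raise a TypeError, hence the "." assumed above. Typical use:

```python
# Usage sketch mirroring the pstfrom notebooks; the directory name is the
# quadtree notebook's template_ws.
import ppu_helpers

ppu_helpers.setup_pps("freyberg6_unstruct_template")
```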
diff --git a/examples/pypestutils_pstfrom_quadtree_mf6.ipynb b/examples/pypestutils_pstfrom_quadtree_mf6.ipynb
index cd185cb..a3fb36a 100644
--- a/examples/pypestutils_pstfrom_quadtree_mf6.ipynb
+++ b/examples/pypestutils_pstfrom_quadtree_mf6.ipynb
@@ -59,10 +59,10 @@
"source": [
"bin_dir = \"quadtree_bin\"\n",
"if os.path.exists(bin_dir):\n",
- " shutil.rmtree(bin_dir)\n",
+ " shutil.rmtree(bin_dir)\n",
"os.makedirs(bin_dir)\n",
- "flopy.utils.get_modflow(bin_dir,downloads_dir=bin_dir)\n",
- "pyemu.utils.get_pestpp(bin_dir,downloads_dir=bin_dir)"
+ "flopy.utils.get_modflow(bin_dir, downloads_dir=bin_dir)\n",
+ "pyemu.utils.get_pestpp(bin_dir, downloads_dir=bin_dir)"
]
},
{
@@ -78,7 +78,7 @@
"metadata": {},
"outputs": [],
"source": [
- "org_model_ws = os.path.join('freyberg_quadtree')\n",
+ "org_model_ws = os.path.join(\"freyberg_quadtree\")\n",
"os.listdir(org_model_ws)"
]
},
@@ -95,7 +95,7 @@
"metadata": {},
"outputs": [],
"source": [
- "id_arr = np.loadtxt(os.path.join(org_model_ws,\"freyberg6.disv_idomain_layer3.txt\"))"
+ "id_arr = np.loadtxt(os.path.join(org_model_ws, \"freyberg6.disv_idomain_layer3.txt\"))"
]
},
{
@@ -114,10 +114,10 @@
"tmp_model_ws = \"temp_ppu_quadtree\"\n",
"if os.path.exists(tmp_model_ws):\n",
" shutil.rmtree(tmp_model_ws)\n",
- "shutil.copytree(org_model_ws,tmp_model_ws)\n",
+ "shutil.copytree(org_model_ws, tmp_model_ws)\n",
"for bin_file in os.listdir(bin_dir):\n",
- " shutil.copy2(os.path.join(bin_dir,bin_file),os.path.join(tmp_model_ws,bin_file))\n",
- "os.listdir(tmp_model_ws)\n"
+ " shutil.copy2(os.path.join(bin_dir, bin_file), os.path.join(tmp_model_ws, bin_file))\n",
+ "os.listdir(tmp_model_ws)"
]
},
{
@@ -137,7 +137,9 @@
"source": [
"sim = flopy.mf6.MFSimulation.load(sim_ws=tmp_model_ws)\n",
"m = sim.get_model(\"freyberg6\")\n",
- "m.modelgrid.set_coord_info(angrot=-55) #for some reason flopy isnt picking up angrot from the disv options...\n"
+ "m.modelgrid.set_coord_info(\n",
+ " angrot=-55\n",
+ ") # for some reason flopy isnt picking up angrot from the disv options..."
]
},
{
@@ -172,10 +174,14 @@
"outputs": [],
"source": [
"template_ws = \"freyberg6_unstruct_template\"\n",
- "pf = pyemu.utils.PstFrom(original_d=tmp_model_ws, new_d=template_ws,\n",
- " remove_existing=True,\n",
- " longnames=True,\n",
- " zero_based=False,start_datetime=\"1-1-2018\")"
+ "pf = pyemu.utils.PstFrom(\n",
+ " original_d=tmp_model_ws,\n",
+ " new_d=template_ws,\n",
+ " remove_existing=True,\n",
+ " longnames=True,\n",
+ " zero_based=False,\n",
+ " start_datetime=\"1-1-2018\",\n",
+ ")"
]
},
{
@@ -193,7 +199,7 @@
"metadata": {},
"outputs": [],
"source": [
- "hdsdf = pd.read_csv(os.path.join(tmp_model_ws,\"heads.csv\"),index_col=0)\n",
+ "hdsdf = pd.read_csv(os.path.join(tmp_model_ws, \"heads.csv\"), index_col=0)\n",
"hdsdf"
]
},
@@ -210,8 +216,13 @@
"metadata": {},
"outputs": [],
"source": [
- "hds_df = pf.add_observations(\"heads.csv\",insfile=\"heads.csv.ins\",index_cols=\"time\",\n",
- " use_cols=list(hdsdf.columns.values),prefix=\"hds\",)\n",
+ "hds_df = pf.add_observations(\n",
+ " \"heads.csv\",\n",
+ " insfile=\"heads.csv.ins\",\n",
+ " index_cols=\"time\",\n",
+ " use_cols=list(hdsdf.columns.values),\n",
+ " prefix=\"hds\",\n",
+ ")\n",
"hds_df"
]
},
@@ -247,7 +258,12 @@
"outputs": [],
"source": [
"df = pd.read_csv(os.path.join(tmp_model_ws, \"sfr.csv\"), index_col=0)\n",
- "sfr_df = pf.add_observations(\"sfr.csv\", insfile=\"sfr.csv.ins\", index_cols=\"time\", use_cols=list(df.columns.values))\n",
+ "sfr_df = pf.add_observations(\n",
+ " \"sfr.csv\",\n",
+ " insfile=\"sfr.csv.ins\",\n",
+ " index_cols=\"time\",\n",
+ " use_cols=list(df.columns.values),\n",
+ ")\n",
"sfr_df"
]
},
@@ -279,7 +295,7 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf = pd.read_csv(os.path.join(\"freyberg_aux_files\",\"pp_info.csv\"),index_col=0)\n",
+ "ppdf = pd.read_csv(os.path.join(\"freyberg_aux_files\", \"pp_info.csv\"), index_col=0)\n",
"ppdf"
]
},
@@ -289,13 +305,13 @@
"metadata": {},
"outputs": [],
"source": [
- "fig,ax = plt.subplots(1,1)\n",
+ "fig, ax = plt.subplots(1, 1)\n",
"ax.set_aspect(\"equal\")\n",
- "#ax.pcolormesh(sr.xcentergrid,sr.ycentergrid,top_arr)\n",
- "#ax = m.dis.top.plot()\n",
+ "# ax.pcolormesh(sr.xcentergrid,sr.ycentergrid,top_arr)\n",
+ "# ax = m.dis.top.plot()\n",
"\n",
- "ax.scatter(m.modelgrid.xcellcenters,m.modelgrid.ycellcenters,alpha=0.5,color=\"0.5\")\n",
- "ax.scatter(ppdf.x,ppdf.y,marker=\".\",c=\"r\")\n",
+ "ax.scatter(m.modelgrid.xcellcenters, m.modelgrid.ycellcenters, alpha=0.5, color=\"0.5\")\n",
+ "ax.scatter(ppdf.x, ppdf.y, marker=\".\", c=\"r\")\n",
"plt.tight_layout()"
]
},
@@ -305,7 +321,7 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf.loc[:,\"value\"] = 3.0 # HK in layer 1"
+ "ppdf.loc[:, \"value\"] = 3.0 # HK in layer 1"
]
},
{
@@ -322,14 +338,14 @@
"outputs": [],
"source": [
"id_mask = id_arr.copy()\n",
- "id_mask[id_mask!=0] = np.nan\n",
- "fig,axes = plt.subplots(1,3)\n",
- "for ax,attr in zip(axes,[\"aniso\",\"corrlen\",\"bearing\"]):\n",
- " #ax.pcolormesh(sr.xcentergrid,sr.ycentergrid,id_mask)\n",
+ "id_mask[id_mask != 0] = np.nan\n",
+ "fig, axes = plt.subplots(1, 3)\n",
+ "for ax, attr in zip(axes, [\"aniso\", \"corrlen\", \"bearing\"]):\n",
+ " # ax.pcolormesh(sr.xcentergrid,sr.ycentergrid,id_mask)\n",
" m.dis.top.plot(axes=[ax])\n",
- " ax.scatter(ppdf.x,ppdf.y,c=ppdf.loc[:,attr])\n",
+ " ax.scatter(ppdf.x, ppdf.y, c=ppdf.loc[:, attr])\n",
" ax.set_aspect(\"equal\")\n",
- " ax.set_title(attr,loc=\"left\")\n",
+ " ax.set_title(attr, loc=\"left\")\n",
" ax.set_xticks([])\n",
" ax.set_yticks([])\n",
"plt.tight_layout()"
@@ -348,8 +364,10 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf.loc[:,\"value\"] = np.random.normal(1.0,0.25,ppdf.shape[0])\n",
- "results = ppu.interpolate_with_sva_pilotpoints_2d(ppdf,os.path.join(pf.new_d,\"freyberg6.disv.grb\"))"
+ "ppdf.loc[:, \"value\"] = np.random.normal(1.0, 0.25, ppdf.shape[0])\n",
+ "results = ppu.interpolate_with_sva_pilotpoints_2d(\n",
+ " ppdf, os.path.join(pf.new_d, \"freyberg6.disv.grb\")\n",
+ ")"
]
},
{
@@ -358,16 +376,16 @@
"metadata": {},
"outputs": [],
"source": [
- "for tag,arr in results.items():\n",
- " fig,ax = plt.subplots(1,1)\n",
+ "for tag, arr in results.items():\n",
+ " fig, ax = plt.subplots(1, 1)\n",
" ax.set_aspect(\"equal\")\n",
- " ax.set_title(tag,loc=\"left\")\n",
+ " ax.set_title(tag, loc=\"left\")\n",
" m.dis.top = arr\n",
" ax = m.dis.top.plot(colorbar=True)\n",
- " \n",
- " #plt.colorbar(cb)\n",
+ "\n",
+ " # plt.colorbar(cb)\n",
" plt.show()\n",
- " plt.close(fig)\n"
+ " plt.close(fig)"
]
},
{
@@ -399,7 +417,9 @@
"metadata": {},
"outputs": [],
"source": [
- "hk_arr_files = [f for f in os.listdir(tmp_model_ws) if \"npf_k_\" in f and f.endswith(\".txt\")]\n",
+ "hk_arr_files = [\n",
+ " f for f in os.listdir(tmp_model_ws) if \"npf_k_\" in f and f.endswith(\".txt\")\n",
+ "]\n",
"hk_arr_files"
]
},
@@ -420,9 +440,9 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf.loc[:,\"value\"] = 1.0\n",
+ "ppdf.loc[:, \"value\"] = 1.0\n",
"pp_v = pyemu.geostats.ExpVario(contribution=1.0, a=2000)\n",
- "pp_gs = pyemu.geostats.GeoStruct(variograms=pp_v,transform=\"log\")"
+ "pp_gs = pyemu.geostats.GeoStruct(variograms=pp_v, transform=\"log\")"
]
},
{
@@ -438,29 +458,52 @@
"metadata": {},
"outputs": [],
"source": [
- "pp_files,mod_files = [],[]\n",
+ "pp_files, mod_files = [], []\n",
"for hk_arr_file in hk_arr_files:\n",
" layer = int(hk_arr_file.split(\".\")[1].split(\"layer\")[1])\n",
- " base = hk_arr_file.split('.')[1].replace(\"_\",\"\")+\"_attr:\"\n",
- " pf.add_parameters(filenames=hk_arr_file,par_type=\"grid\",\n",
- " par_name_base=base+\"gr\",pargp=base+\"gr\",zone_array=np.atleast_2d(ib).transpose(),\n",
- " upper_bound=2.,lower_bound=0.5,ult_ubound=100,ult_lbound=0.01)\n",
+ " base = hk_arr_file.split(\".\")[1].replace(\"_\", \"\") + \"_attr:\"\n",
+ " pf.add_parameters(\n",
+ " filenames=hk_arr_file,\n",
+ " par_type=\"grid\",\n",
+ " par_name_base=base + \"gr\",\n",
+ " pargp=base + \"gr\",\n",
+ " zone_array=np.atleast_2d(ib).transpose(),\n",
+ " upper_bound=2.0,\n",
+ " lower_bound=0.5,\n",
+ " ult_ubound=100,\n",
+ " ult_lbound=0.01,\n",
+ " )\n",
" pppdf = ppdf.copy()\n",
- " \n",
- " pppdf.loc[:,\"name\"] = [n for n in pppdf.ppname.values]\n",
- " pppdf.loc[:,\"ppname\"] = pppdf.name.values\n",
- " pp_file = os.path.join(pf.new_d,base+\"pp.csv\")\n",
- " pppdf.to_csv(pp_file,index=False)\n",
+ "\n",
+ " pppdf.loc[:, \"name\"] = [n for n in pppdf.ppname.values]\n",
+ " pppdf.loc[:, \"ppname\"] = pppdf.name.values\n",
+ " pp_file = os.path.join(pf.new_d, base + \"pp.csv\")\n",
+ " pppdf.to_csv(pp_file, index=False)\n",
" pp_files.append(os.path.split(pp_file)[1])\n",
" mod_files.append(hk_arr_file)\n",
- " df = pf.add_parameters(os.path.split(pp_file)[1],par_type=\"grid\",index_cols={\"ppname\":\"ppname\",\"x\":\"x\",\"y\":\"y\"},\n",
- " use_cols=[\"value\",\"bearing\",\"aniso\",\"corrlen\"],\n",
- " par_name_base=[base+\"pp\",base+\"bearing\",base+\"aniso\",base+\"corrlen\"],\n",
- " pargp=[base+\"pp\",base+\"bearing\",base+\"aniso\",base+\"corrlen\"],\n",
- " upper_bound=[20,pppdf.bearing.max()*1.1,pppdf.aniso.max()*1.1,pppdf.corrlen.max()*1.1],\n",
- " lower_bound=[.05,pppdf.bearing.min()*.9,pppdf.aniso.min()*.9,pppdf.corrlen.min()*.9],\n",
- " par_style=\"direct\",transform=\"log\",geostruct=pp_gs)\n",
- " "
+ " df = pf.add_parameters(\n",
+ " os.path.split(pp_file)[1],\n",
+ " par_type=\"grid\",\n",
+ " index_cols={\"ppname\": \"ppname\", \"x\": \"x\", \"y\": \"y\"},\n",
+ " use_cols=[\"value\", \"bearing\", \"aniso\", \"corrlen\"],\n",
+ " par_name_base=[base + \"pp\", base + \"bearing\", base + \"aniso\", base + \"corrlen\"],\n",
+ " pargp=[base + \"pp\", base + \"bearing\", base + \"aniso\", base + \"corrlen\"],\n",
+ " upper_bound=[\n",
+ " 20,\n",
+ " pppdf.bearing.max() * 1.1,\n",
+ " pppdf.aniso.max() * 1.1,\n",
+ " pppdf.corrlen.max() * 1.1,\n",
+ " ],\n",
+ " lower_bound=[\n",
+ " 0.05,\n",
+ " pppdf.bearing.min() * 0.9,\n",
+ " pppdf.aniso.min() * 0.9,\n",
+ " pppdf.corrlen.min() * 0.9,\n",
+ " ],\n",
+ " par_style=\"direct\",\n",
+ " transform=\"log\",\n",
+ " geostruct=pp_gs,\n",
+ " )"
]
},
{
@@ -486,10 +529,9 @@
"metadata": {},
"outputs": [],
"source": [
- "with open(os.path.join(template_ws,tpl_files[0]),'r') as f:\n",
+ "with open(os.path.join(template_ws, tpl_files[0]), \"r\") as f:\n",
" for _ in range(4):\n",
- " print(f.readline().strip())\n",
- " "
+ " print(f.readline().strip())"
]
},
{
@@ -507,7 +549,9 @@
"metadata": {},
"outputs": [],
"source": [
- "shutil.copy2(os.path.join(pf.new_d,\"freyberg6.disv.grb\"),os.path.join(pf.new_d,\"org.grb\"))"
+ "shutil.copy2(\n",
+ " os.path.join(pf.new_d, \"freyberg6.disv.grb\"), os.path.join(pf.new_d, \"org.grb\")\n",
+ ")"
]
},
{
@@ -516,7 +560,7 @@
"metadata": {},
"outputs": [],
"source": [
- "_ = [print(line.rstrip()) for line in open(\"ppu_helpers.py\",'r').readlines()]"
+ "_ = [print(line.rstrip()) for line in open(\"ppu_helpers.py\", \"r\").readlines()]"
]
},
{
@@ -533,8 +577,8 @@
"outputs": [],
"source": [
"# save the pp file and model file info\n",
- "df = pd.DataFrame({\"model_file\":mod_files,\"pp_file\":pp_files})\n",
- "df.to_csv(os.path.join(pf.new_d,\"pp_info.csv\"))"
+ "df = pd.DataFrame({\"model_file\": mod_files, \"pp_file\": pp_files})\n",
+ "df.to_csv(os.path.join(pf.new_d, \"pp_info.csv\"))"
]
},
{
@@ -544,6 +588,7 @@
"outputs": [],
"source": [
"import ppu_helpers\n",
+ "\n",
"ppu_helpers.setup_pps(pf.new_d)"
]
},
@@ -553,7 +598,7 @@
"metadata": {},
"outputs": [],
"source": [
- "pf.add_py_function(\"ppu_helpers.py\",\"apply_pps()\",is_pre_cmd=True)"
+ "pf.add_py_function(\"ppu_helpers.py\", \"apply_pps()\", is_pre_cmd=True)"
]
},
{
@@ -569,7 +614,9 @@
"metadata": {},
"outputs": [],
"source": [
- "interp_files = [f for f in os.listdir(pf.new_d) if f.endswith(\".txt\") and f.startswith(\"interp\")]\n",
+ "interp_files = [\n",
+ " f for f in os.listdir(pf.new_d) if f.endswith(\".txt\") and f.startswith(\"interp\")\n",
+ "]\n",
"assert len(interp_files) == 3"
]
},
@@ -580,8 +627,8 @@
"outputs": [],
"source": [
"for interp_file in interp_files:\n",
- " base = interp_file.replace(\"_\",\"\").replace(\".\",\"\").replace(\"txt\",\"\")\n",
- " pf.add_observations(interp_file,prefix=base,obsgp=base)"
+ " base = interp_file.replace(\"_\", \"\").replace(\".\", \"\").replace(\"txt\", \"\")\n",
+ " pf.add_observations(interp_file, prefix=base, obsgp=base)"
]
},
{
@@ -590,9 +637,10 @@
"metadata": {},
"outputs": [],
"source": [
- "\n",
- "log_files = [f for f in os.listdir(pf.new_d) if f.endswith(\".txt\") and f.startswith(\"log_\")]\n",
- "assert len(log_files) == 3\n"
+ "log_files = [\n",
+ " f for f in os.listdir(pf.new_d) if f.endswith(\".txt\") and f.startswith(\"log_\")\n",
+ "]\n",
+ "assert len(log_files) == 3"
]
},
{
@@ -602,8 +650,8 @@
"outputs": [],
"source": [
"for log_file in log_files:\n",
- " base = log_file.replace(\"_\",\"\").replace(\".\",\"\").replace(\"txt\",\"\")\n",
- " pf.add_observations(log_file,prefix=base,obsgp=base)"
+ " base = log_file.replace(\"_\", \"\").replace(\".\", \"\").replace(\"txt\", \"\")\n",
+ " pf.add_observations(log_file, prefix=base, obsgp=base)"
]
},
{
@@ -639,7 +687,7 @@
"metadata": {},
"outputs": [],
"source": [
- "_ = [print(line.rstrip()) for line in open(os.path.join(template_ws,\"forward_run.py\"))]"
+ "_ = [print(line.rstrip()) for line in open(os.path.join(template_ws, \"forward_run.py\"))]"
]
},
{
@@ -660,8 +708,8 @@
"outputs": [],
"source": [
"pst.control_data.noptmax = 0\n",
- "pst.write(os.path.join(pf.new_d,\"freyberg.pst\"),version=2)\n",
- "pyemu.os_utils.run(\"pestpp-ies freyberg.pst\",cwd=pf.new_d)"
+ "pst.write(os.path.join(pf.new_d, \"freyberg.pst\"), version=2)\n",
+ "pyemu.os_utils.run(\"pestpp-ies freyberg.pst\", cwd=pf.new_d)"
]
},
{
@@ -680,7 +728,7 @@
"outputs": [],
"source": [
"num_reals = 50\n",
- "pe = pf.draw(num_reals=num_reals,use_specsim=False)"
+ "pe = pf.draw(num_reals=num_reals, use_specsim=False)"
]
},
{
@@ -705,7 +753,9 @@
"metadata": {},
"outputs": [],
"source": [
- "results = ppu.generate_2d_grid_realizations(os.path.join(pf.new_d,\"org.grb\"),num_reals=num_reals)"
+ "results = ppu.generate_2d_grid_realizations(\n",
+ " os.path.join(pf.new_d, \"org.grb\"), num_reals=num_reals\n",
+ ")"
]
},
{
@@ -714,9 +764,9 @@
"metadata": {},
"outputs": [],
"source": [
- "#fig,ax = plt.subplots(1,1)\n",
- "#ax.set_aspect(\"equal\")\n",
- "#ax.pcolormesh(m.modelgrid.xcellcenters,m.modelgrid.ycellcenters,results[0])\n",
+ "# fig,ax = plt.subplots(1,1)\n",
+ "# ax.set_aspect(\"equal\")\n",
+ "# ax.pcolormesh(m.modelgrid.xcellcenters,m.modelgrid.ycellcenters,results[0])\n",
"m.dis.top = results[0]\n",
"m.dis.top.plot()\n",
"plt.show()\n",
@@ -736,7 +786,12 @@
"metadata": {},
"outputs": [],
"source": [
- "results = ppu.generate_2d_grid_realizations(os.path.join(pf.new_d,\"org.grb\"),num_reals=num_reals,variobearing=90,varioaniso=3)\n",
+ "results = ppu.generate_2d_grid_realizations(\n",
+ " os.path.join(pf.new_d, \"org.grb\"),\n",
+ " num_reals=num_reals,\n",
+ " variobearing=90,\n",
+ " varioaniso=3,\n",
+ ")\n",
"m.dis.top = results[0]\n",
"m.dis.top.plot(colorbar=True)\n",
"plt.show()\n",
@@ -766,7 +821,7 @@
"metadata": {},
"outputs": [],
"source": [
- "grid_groups = [\"npfklayer1_attr:gr\",\"npfklayer2_attr:gr\",\"npfklayer3_attr:gr\"]"
+ "grid_groups = [\"npfklayer1_attr:gr\", \"npfklayer2_attr:gr\", \"npfklayer3_attr:gr\"]"
]
},
{
@@ -775,22 +830,33 @@
"metadata": {},
"outputs": [],
"source": [
- "for igrp,grp in enumerate(grid_groups):\n",
- " grpar = par.loc[par.pargp==grp,:].copy()\n",
+ "for igrp, grp in enumerate(grid_groups):\n",
+ " grpar = par.loc[par.pargp == grp, :].copy()\n",
" assert grpar.shape[0] > 0\n",
" grpar[\"i\"] = grpar.i.astype(int)\n",
- " \n",
- " names,ivals,jvals = grpar.parnme.values,grpar.i.values,grpar.j.values\n",
- " results = ppu.generate_2d_grid_realizations(os.path.join(pf.new_d,\"org.grb\"),num_reals=num_reals,\n",
- " variobearing=1.0,varioaniso=1.0,variorange=1000,variance=0.0125,mean=0.0,random_seed=12345*igrp)\n",
+ "\n",
+ " names, ivals, jvals = grpar.parnme.values, grpar.i.values, grpar.j.values\n",
+ " results = ppu.generate_2d_grid_realizations(\n",
+ " os.path.join(pf.new_d, \"org.grb\"),\n",
+ " num_reals=num_reals,\n",
+ " variobearing=1.0,\n",
+ " varioaniso=1.0,\n",
+ " variorange=1000,\n",
+ " variance=0.0125,\n",
+ " mean=0.0,\n",
+ " random_seed=12345 * igrp,\n",
+ " )\n",
" print(grpar.shape)\n",
- " for ireal,real in enumerate(results):\n",
- " real = real[id_arr>0]\n",
- " #print(real.shape,ivals.shape,jvals.shape)\n",
- " pe.loc[pe.index[ireal],names] = 10**(real)\n",
- " print(\"group bound info: \",grpar.parlbnd.unique(),grpar.parubnd.unique())\n",
- " print(\"ensemble range info: \",pe.loc[:,names].values.min(),pe.loc[:,names].values.max())\n",
- " "
+ " for ireal, real in enumerate(results):\n",
+ " real = real[id_arr > 0]\n",
+ " # print(real.shape,ivals.shape,jvals.shape)\n",
+ " pe.loc[pe.index[ireal], names] = 10 ** (real)\n",
+ " print(\"group bound info: \", grpar.parlbnd.unique(), grpar.parubnd.unique())\n",
+ " print(\n",
+ " \"ensemble range info: \",\n",
+ " pe.loc[:, names].values.min(),\n",
+ " pe.loc[:, names].values.max(),\n",
+ " )"
]
},
{
@@ -806,8 +872,8 @@
"metadata": {},
"outputs": [],
"source": [
- "arr = np.zeros_like(id_arr,dtype=float)\n",
- "arr[id_arr>0] = pe.loc[pe.index[0],names].values\n",
+ "arr = np.zeros_like(id_arr, dtype=float)\n",
+ "arr[id_arr > 0] = pe.loc[pe.index[0], names].values\n",
"print(arr)\n",
"m.dis.top = arr\n",
"m.dis.top.plot(colorbar=True)"
@@ -829,7 +895,7 @@
"outputs": [],
"source": [
"pe.enforce()\n",
- "pe.to_csv(os.path.join(template_ws,\"prior.csv\"))"
+ "pe.to_csv(os.path.join(template_ws, \"prior.csv\"))"
]
},
{
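Review note on pypestutils_pstfrom_quadtree_mf6.ipynb: the prior-ensemble loop above draws correlated log-multiplier fields straight from the binary grid file. A condensed recap (assumes ppu is pypestutils.helpers, as used earlier in the notebook, and that the template directory exists):

```python
import os

from pypestutils import helpers as ppu

results = ppu.generate_2d_grid_realizations(
    os.path.join("freyberg6_unstruct_template", "org.grb"),
    num_reals=50,
    variobearing=1.0,
    varioaniso=1.0,
    variorange=1000,
    variance=0.0125,  # variance of the log-multiplier field
    mean=0.0,
    random_seed=12345,
)
# one realization per entry; the notebook masks to active cells with
# real[id_arr > 0] before filling the parameter ensemble
print(len(results))
```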
diff --git a/examples/pypestutils_pstfrom_structured_mf6.ipynb b/examples/pypestutils_pstfrom_structured_mf6.ipynb
index 887c129..95e32f8 100644
--- a/examples/pypestutils_pstfrom_structured_mf6.ipynb
+++ b/examples/pypestutils_pstfrom_structured_mf6.ipynb
@@ -52,10 +52,10 @@
"source": [
"bin_dir = \"struct_bin\"\n",
"if os.path.exists(bin_dir):\n",
- " shutil.rmtree(bin_dir)\n",
+ " shutil.rmtree(bin_dir)\n",
"os.makedirs(bin_dir)\n",
- "flopy.utils.get_modflow(bin_dir,downloads_dir=bin_dir)\n",
- "pyemu.utils.get_pestpp(bin_dir,downloads_dir=bin_dir)"
+ "flopy.utils.get_modflow(bin_dir, downloads_dir=bin_dir)\n",
+ "pyemu.utils.get_pestpp(bin_dir, downloads_dir=bin_dir)"
]
},
{
@@ -71,7 +71,7 @@
"metadata": {},
"outputs": [],
"source": [
- "org_model_ws = os.path.join('freyberg_monthly')\n",
+ "org_model_ws = os.path.join(\"freyberg_monthly\")\n",
"os.listdir(org_model_ws)"
]
},
@@ -90,9 +90,9 @@
"metadata": {},
"outputs": [],
"source": [
- "id_arr = np.loadtxt(os.path.join(org_model_ws,\"freyberg6.dis_idomain_layer3.txt\"))\n",
- "top_arr = np.loadtxt(os.path.join(org_model_ws,\"freyberg6.dis_top.txt\"))\n",
- "top_arr[id_arr==0] = np.nan\n",
+ "id_arr = np.loadtxt(os.path.join(org_model_ws, \"freyberg6.dis_idomain_layer3.txt\"))\n",
+ "top_arr = np.loadtxt(os.path.join(org_model_ws, \"freyberg6.dis_top.txt\"))\n",
+ "top_arr[id_arr == 0] = np.nan\n",
"plt.imshow(top_arr)\n",
"top_arr.shape"
]
@@ -113,9 +113,9 @@
"tmp_model_ws = \"temp_ppu_struct\"\n",
"if os.path.exists(tmp_model_ws):\n",
" shutil.rmtree(tmp_model_ws)\n",
- "shutil.copytree(org_model_ws,tmp_model_ws)\n",
+ "shutil.copytree(org_model_ws, tmp_model_ws)\n",
"for bin_file in os.listdir(bin_dir):\n",
- " shutil.copy2(os.path.join(bin_dir,bin_file),os.path.join(tmp_model_ws,bin_file))\n",
+ " shutil.copy2(os.path.join(bin_dir, bin_file), os.path.join(tmp_model_ws, bin_file))\n",
"os.listdir(tmp_model_ws)"
]
},
@@ -135,7 +135,7 @@
"outputs": [],
"source": [
"sim = flopy.mf6.MFSimulation.load(sim_ws=tmp_model_ws)\n",
- "m = sim.get_model(\"freyberg6\")\n"
+ "m = sim.get_model(\"freyberg6\")"
]
},
{
@@ -152,10 +152,15 @@
"outputs": [],
"source": [
"template_ws = \"freyberg6_template\"\n",
- "pf = pyemu.utils.PstFrom(original_d=tmp_model_ws, new_d=template_ws,\n",
- " remove_existing=True,\n",
- " longnames=True, spatial_reference=m.modelgrid,\n",
- " zero_based=False,start_datetime=\"1-1-2018\")"
+ "pf = pyemu.utils.PstFrom(\n",
+ " original_d=tmp_model_ws,\n",
+ " new_d=template_ws,\n",
+ " remove_existing=True,\n",
+ " longnames=True,\n",
+ " spatial_reference=m.modelgrid,\n",
+ " zero_based=False,\n",
+ " start_datetime=\"1-1-2018\",\n",
+ ")"
]
},
{
@@ -173,7 +178,7 @@
"metadata": {},
"outputs": [],
"source": [
- "hdsdf = pd.read_csv(os.path.join(tmp_model_ws,\"heads.csv\"),index_col=0)\n",
+ "hdsdf = pd.read_csv(os.path.join(tmp_model_ws, \"heads.csv\"), index_col=0)\n",
"hdsdf"
]
},
@@ -190,8 +195,13 @@
"metadata": {},
"outputs": [],
"source": [
- "hds_df = pf.add_observations(\"heads.csv\",insfile=\"heads.csv.ins\",index_cols=\"time\",\n",
- " use_cols=list(hdsdf.columns.values),prefix=\"hds\",)\n",
+ "hds_df = pf.add_observations(\n",
+ " \"heads.csv\",\n",
+ " insfile=\"heads.csv.ins\",\n",
+ " index_cols=\"time\",\n",
+ " use_cols=list(hdsdf.columns.values),\n",
+ " prefix=\"hds\",\n",
+ ")\n",
"hds_df"
]
},
@@ -227,7 +237,12 @@
"outputs": [],
"source": [
"df = pd.read_csv(os.path.join(tmp_model_ws, \"sfr.csv\"), index_col=0)\n",
- "sfr_df = pf.add_observations(\"sfr.csv\", insfile=\"sfr.csv.ins\", index_cols=\"time\", use_cols=list(df.columns.values))\n",
+ "sfr_df = pf.add_observations(\n",
+ " \"sfr.csv\",\n",
+ " insfile=\"sfr.csv.ins\",\n",
+ " index_cols=\"time\",\n",
+ " use_cols=list(df.columns.values),\n",
+ ")\n",
"sfr_df"
]
},
@@ -266,7 +281,9 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf = ppu.get_2d_pp_info_structured_grid(10,os.path.join(pf.new_d,\"freyberg6.dis.grb\"))\n",
+ "ppdf = ppu.get_2d_pp_info_structured_grid(\n",
+ " 10, os.path.join(pf.new_d, \"freyberg6.dis.grb\")\n",
+ ")\n",
"ppdf.head()"
]
},
@@ -276,11 +293,11 @@
"metadata": {},
"outputs": [],
"source": [
- "fig,ax = plt.subplots(1,1)\n",
+ "fig, ax = plt.subplots(1, 1)\n",
"ax.set_aspect(\"equal\")\n",
- "#ax.pcolormesh(sr.xcentergrid,sr.ycentergrid,top_arr)\n",
- "ax.pcolormesh(m.modelgrid.xcellcenters,m.modelgrid.ycellcenters,top_arr)\n",
- "ax.scatter(ppdf.x,ppdf.y,marker=\".\",c=\"r\")"
+ "# ax.pcolormesh(sr.xcentergrid,sr.ycentergrid,top_arr)\n",
+ "ax.pcolormesh(m.modelgrid.xcellcenters, m.modelgrid.ycellcenters, top_arr)\n",
+ "ax.scatter(ppdf.x, ppdf.y, marker=\".\", c=\"r\")"
]
},
{
@@ -296,7 +313,11 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf = ppu.get_2d_pp_info_structured_grid(10,os.path.join(pf.new_d,\"freyberg6.dis.grb\"),array_dict={\"zone\":m.dis.idomain.array[0,:,:]})\n",
+ "ppdf = ppu.get_2d_pp_info_structured_grid(\n",
+ " 10,\n",
+ " os.path.join(pf.new_d, \"freyberg6.dis.grb\"),\n",
+ " array_dict={\"zone\": m.dis.idomain.array[0, :, :]},\n",
+ ")\n",
"ppdf.head()"
]
},
@@ -306,10 +327,10 @@
"metadata": {},
"outputs": [],
"source": [
- "fig,ax = plt.subplots(1,1)\n",
+ "fig, ax = plt.subplots(1, 1)\n",
"ax.set_aspect(\"equal\")\n",
- "ax.pcolormesh(m.modelgrid.xcellcenters,m.modelgrid.ycellcenters,top_arr)\n",
- "ax.scatter(ppdf.x,ppdf.y,marker=\".\",c=\"r\")"
+ "ax.pcolormesh(m.modelgrid.xcellcenters, m.modelgrid.ycellcenters, top_arr)\n",
+ "ax.scatter(ppdf.x, ppdf.y, marker=\".\", c=\"r\")"
]
},
{
@@ -332,7 +353,7 @@
"metadata": {},
"outputs": [],
"source": [
- "ppdf.loc[:,\"value\"] = 3.0 # HK in layer 1"
+ "ppdf.loc[:, \"value\"] = 3.0 # HK in layer 1"
]
},
{
@@ -343,10 +364,10 @@
"source": [
"# set aniso to be a function of column value\n",
"# stronger aniso near the sfr network\n",
- "jmin,jmax = ppdf.j.min(),float(ppdf.j.max())\n",
- "ppdf.loc[:,\"aniso\"] = 30 * ppdf.j.values.copy() / jmax\n",
- "ppdf.loc[ppdf.aniso<1,\"aniso\"] = 1\n",
- "ppdf.loc[ppdf.aniso>10,\"aniso\"] = 10\n"
+ "jmin, jmax = ppdf.j.min(), float(ppdf.j.max())\n",
+ "ppdf.loc[:, \"aniso\"] = 30 * ppdf.j.values.copy() / jmax\n",
+ "ppdf.loc[ppdf.aniso < 1, \"aniso\"] = 1\n",
+ "ppdf.loc[ppdf.aniso > 10, \"aniso\"] = 10"
]
},
{
@@ -359,8 +380,8 @@
"# cl = ppdf.corrlen.min()\n",
"# ppdf.loc[:,\"corrlen\"] = cl * (ppdf.j.values.copy() / jmax) * sr.delc.max()\n",
"# ppdf.loc[ppdf.corrlen 0\n",
" grpar[\"i\"] = grpar.i.astype(int)\n",
" grpar[\"j\"] = grpar.j.astype(int)\n",
- " names,ivals,jvals = grpar.parnme.values,grpar.i.values,grpar.j.values\n",
- " results = ppu.generate_2d_grid_realizations(os.path.join(pf.new_d,\"org.grb\"),num_reals=num_reals,\n",
- " variobearing=1.0,varioaniso=1.0,variorange=1000,variance=0.0125,mean=0.0,random_seed=12345*igrp)\n",
- " for ireal,real in enumerate(results):\n",
- " pe.loc[pe.index[ireal],names] = 10**(real[ivals,jvals])\n",
- " print(\"group bound info: \",grpar.parlbnd.unique(),grpar.parubnd.unique())\n",
- " print(\"ensemble range info: \",pe.loc[:,names].values.min(),pe.loc[:,names].values.max())\n",
- " "
+ " names, ivals, jvals = grpar.parnme.values, grpar.i.values, grpar.j.values\n",
+ " results = ppu.generate_2d_grid_realizations(\n",
+ " os.path.join(pf.new_d, \"org.grb\"),\n",
+ " num_reals=num_reals,\n",
+ " variobearing=1.0,\n",
+ " varioaniso=1.0,\n",
+ " variorange=1000,\n",
+ " variance=0.0125,\n",
+ " mean=0.0,\n",
+ " random_seed=12345 * igrp,\n",
+ " )\n",
+ " for ireal, real in enumerate(results):\n",
+ " pe.loc[pe.index[ireal], names] = 10 ** (real[ivals, jvals])\n",
+ " print(\"group bound info: \", grpar.parlbnd.unique(), grpar.parubnd.unique())\n",
+ " print(\n",
+ " \"ensemble range info: \",\n",
+ " pe.loc[:, names].values.min(),\n",
+ " pe.loc[:, names].values.max(),\n",
+ " )"
]
},
{
@@ -918,9 +996,9 @@
"metadata": {},
"outputs": [],
"source": [
- "arr = np.zeros((nrow,ncol))\n",
- "arr[ivals,jvals] = pe.loc[pe.index[0],names]\n",
- "arr[id_arr==0] = np.nan\n",
+ "arr = np.zeros((nrow, ncol))\n",
+ "arr[ivals, jvals] = pe.loc[pe.index[0], names]\n",
+ "arr[id_arr == 0] = np.nan\n",
"cb = plt.imshow(arr)\n",
"plt.colorbar(cb)"
]
@@ -941,7 +1019,7 @@
"outputs": [],
"source": [
"pe.enforce()\n",
- "pe.to_csv(os.path.join(template_ws,\"prior.csv\"))"
+ "pe.to_csv(os.path.join(template_ws, \"prior.csv\"))"
]
}
],
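The hunk above draws grid-shaped realizations per parameter group and indexes them into the ensemble. A condensed sketch of that pattern, assuming ppu is pypestutils.helpers (as in the notebook) and that the binary grid file exists; the ensemble assignment is left as a comment because pe, names, ivals and jvals come from the surrounding notebook cells:

import os
import pypestutils.helpers as ppu

results = ppu.generate_2d_grid_realizations(
    os.path.join("freyberg6_template", "freyberg6.dis.grb"),
    num_reals=10,
    variobearing=1.0,
    varioaniso=1.0,
    variorange=1000,
    variance=0.0125,
    mean=0.0,
    random_seed=12345,
)
# each realization is grid-shaped (nrow x ncol for a structured grid), so
# sampling at the parameter cells is a fancy-index plus a log10 back-transform:
# pe.loc[pe.index[ireal], names] = 10 ** real[ivals, jvals]
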
diff --git a/examples/run_notebooks.py b/examples/run_notebooks.py
index 1a30316..f5bfe74 100644
--- a/examples/run_notebooks.py
+++ b/examples/run_notebooks.py
@@ -7,19 +7,34 @@
pdf = False
allow_errors = True
-def run_nb(nb_file, nb_dir):
+
+def run_nb(nb_file, nb_dir):
os.chdir(nb_dir)
- worker_dirs = [d for d in os.listdir(".") if os.path.isdir(d) and d.startswith("worker")]
+ worker_dirs = [
+ d for d in os.listdir(".") if os.path.isdir(d) and d.startswith("worker")
+ ]
for worker_dir in worker_dirs:
shutil.rmtree(worker_dir)
if allow_errors:
- os.system("jupyter nbconvert --execute --ExecutePreprocessor.timeout=180000 --allow-errors --inplace {0}".format(nb_file))
+ os.system(
+ "jupyter nbconvert --execute --ExecutePreprocessor.timeout=180000 --allow-errors --inplace {0}".format(
+ nb_file
+ )
+ )
else:
- os.system("jupyter nbconvert --execute --ExecutePreprocessor.timeout=180000 --inplace {0}".format(nb_file))
+ os.system(
+ "jupyter nbconvert --execute --ExecutePreprocessor.timeout=180000 --inplace {0}".format(
+ nb_file
+ )
+ )
if pdf:
os.system("jupyter nbconvert --to pdf {0}".format(nb_file))
if clear:
- os.system("jupyter nbconvert --ClearOutputPreprocessor.enabled=True --ClearMetadataPreprocessor.enabled=True --allow-errors --inplace {0}".format(nb_file))
+ os.system(
+ "jupyter nbconvert --ClearOutputPreprocessor.enabled=True --ClearMetadataPreprocessor.enabled=True --allow-errors --inplace {0}".format(
+ nb_file
+ )
+ )
os.chdir(cwd)
return
@@ -30,7 +45,3 @@ def run_nb(nb_file, nb_dir):
nb_files.sort()
for nb_file in nb_files:
run_nb(nb_file, nb_dir)
-
-
-
-
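For reference, the same nbconvert invocation without shell string formatting; a sketch only, using subprocess with the flags mirrored from run_nb above:

import subprocess

def run_nb_subprocess(nb_file: str, allow_errors: bool = True) -> None:
    cmd = [
        "jupyter", "nbconvert", "--execute",
        "--ExecutePreprocessor.timeout=180000", "--inplace", nb_file,
    ]
    if allow_errors:
        # insert before "--inplace", matching the flag order in run_nb
        cmd.insert(-2, "--allow-errors")
    subprocess.run(cmd, check=True)
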
diff --git a/examples/swap.py b/examples/swap.py
index a98ad3d..88374c6 100644
--- a/examples/swap.py
+++ b/examples/swap.py
@@ -21,41 +21,56 @@ def repair_and_prep_quadtree_model():
if f.startswith("."):
continue
try:
- lines = open(os.path.join(org_d,f),'r').readlines()
+ lines = open(os.path.join(org_d, f), "r").readlines()
except:
- print("error for file ",f)
+ print("error for file ", f)
continue
- with open(os.path.join(new_d,f.replace("project","freyberg6")),'w') as ff:
+ with open(os.path.join(new_d, f.replace("project", "freyberg6")), "w") as ff:
for line in lines:
- ff.write(line.lower().replace("project","freyberg6"))
-
+ ff.write(line.lower().replace("project", "freyberg6"))
mn_dir = "freyberg_monthly"
- shutil.copy2(os.path.join(mn_dir,"freyberg6.tdis"),os.path.join(new_d,"freyberg6.tdis"))
+ shutil.copy2(
+ os.path.join(mn_dir, "freyberg6.tdis"), os.path.join(new_d, "freyberg6.tdis")
+ )
wel_files = [f for f in os.listdir(mn_dir) if "wel" in f and f.endswith(".txt")]
- org_uwell_file = os.path.join(new_d,"freyberg6.wel_stress_period_data_1.txt")
- org_wel_df = pd.read_csv(org_uwell_file,delim_whitespace=True,header=None,names=["layer","node","flux"])
+ org_uwell_file = os.path.join(new_d, "freyberg6.wel_stress_period_data_1.txt")
+ org_wel_df = pd.read_csv(
+ org_uwell_file,
+ delim_whitespace=True,
+ header=None,
+ names=["layer", "node", "flux"],
+ )
print(org_wel_df)
for wel_file in wel_files:
- df = pd.read_csv(os.path.join(mn_dir,wel_file),delim_whitespace=True,header=None,names=["layer","row","col","flux"])
+ df = pd.read_csv(
+ os.path.join(mn_dir, wel_file),
+ delim_whitespace=True,
+ header=None,
+ names=["layer", "row", "col", "flux"],
+ )
new_df = org_wel_df.copy()
- new_df.loc[:,"flux"] = df.flux.values
+ new_df.loc[:, "flux"] = df.flux.values
print(new_df)
- new_df.to_csv(os.path.join(new_d,wel_file),sep=" ",index=False,header=False)
- shutil.copy2(os.path.join(mn_dir,"freyberg6.wel"),os.path.join(new_d,"freyberg6.wel"))
+ new_df.to_csv(os.path.join(new_d, wel_file), sep=" ", index=False, header=False)
+ shutil.copy2(
+ os.path.join(mn_dir, "freyberg6.wel"), os.path.join(new_d, "freyberg6.wel")
+ )
rch_files = [f for f in os.listdir(mn_dir) if "rcha" in f and f.endswith(".txt")]
- org_urch_file = os.path.join(new_d,"freyberg6.rcha_recharge_1.txt")
+ org_urch_file = os.path.join(new_d, "freyberg6.rcha_recharge_1.txt")
vals = []
- with open(org_urch_file,'r') as f:
+ with open(org_urch_file, "r") as f:
for line in f:
vals.extend([float(v) for v in line.strip().split()])
org_urch_arr = np.array(vals)
- shutil.copy(os.path.join(mn_dir,"freyberg6.sto"),os.path.join(new_d,"freyberg6.sto"))
+ shutil.copy(
+ os.path.join(mn_dir, "freyberg6.sto"), os.path.join(new_d, "freyberg6.sto")
+ )
sto_files = [f for f in os.listdir(mn_dir) if "sto" in f and f.endswith(".txt")]
print(sto_files)
@@ -64,148 +79,192 @@ def repair_and_prep_quadtree_model():
rch_files.extend(npf_files)
print(rch_files)
for rch_file in rch_files:
- arr = np.loadtxt(os.path.join(mn_dir,rch_file))
+ arr = np.loadtxt(os.path.join(mn_dir, rch_file))
new_arr = np.zeros_like(org_urch_arr) + arr.mean()
- np.savetxt(os.path.join(new_d,rch_file),new_arr,fmt="%15.6E")
- shutil.copy2(os.path.join(mn_dir,"freyberg6.rcha"),os.path.join(new_d,"freyberg6.rcha"))
+ np.savetxt(os.path.join(new_d, rch_file), new_arr, fmt="%15.6E")
+ shutil.copy2(
+ os.path.join(mn_dir, "freyberg6.rcha"), os.path.join(new_d, "freyberg6.rcha")
+ )
# now unwrap all that dumbass wrapped format - what is this 1980?!
- txt_files = [f for f in os.listdir(os.path.join(new_d)) if f.startswith("freyberg6") and f.endswith(".txt")
- and "stress" not in f and "continuous" not in f and "sfr" not in f and "vertices" not in f and "cell" not in f]
+ txt_files = [
+ f
+ for f in os.listdir(os.path.join(new_d))
+ if f.startswith("freyberg6")
+ and f.endswith(".txt")
+ and "stress" not in f
+ and "continuous" not in f
+ and "sfr" not in f
+ and "vertices" not in f
+ and "cell" not in f
+ ]
txt_files.sort()
for txt_file in txt_files:
- lines = open(os.path.join(new_d,txt_file),'r').readlines()
+ lines = open(os.path.join(new_d, txt_file), "r").readlines()
vals = []
for line in lines:
vals.extend(line.strip().split())
- with open(os.path.join(new_d,txt_file),'w') as f:
- [f.write(v+"\n") for v in vals]
- #print(txt_files)
- botm = [32.5,30.0,10.0]
- for k,b in enumerate(botm):
+ with open(os.path.join(new_d, txt_file), "w") as f:
+ [f.write(v + "\n") for v in vals]
+ # print(txt_files)
+ botm = [32.5, 30.0, 10.0]
+ for k, b in enumerate(botm):
arr = np.zeros_like(org_urch_arr) + b
- np.savetxt(os.path.join(new_d,"freyberg6.disv_botm_layer{0}.txt".format(k+1)),arr,fmt="%15.6E")
- top = np.loadtxt(os.path.join(new_d,"freyberg6.disv_top.txt"))
+ np.savetxt(
+ os.path.join(new_d, "freyberg6.disv_botm_layer{0}.txt".format(k + 1)),
+ arr,
+ fmt="%15.6E",
+ )
+ top = np.loadtxt(os.path.join(new_d, "freyberg6.disv_top.txt"))
delt = top - botm[0]
- top += np.abs(delt.min()) + 1.1 #?
- np.savetxt(os.path.join(new_d,"freyberg6.disv_top.txt"),top,fmt="%15.6E")
- mnsim = flopy.mf6.MFSimulation.load(sim_ws=mn_dir,load_only=["dis"])
+ top += np.abs(delt.min()) + 1.1 # ?
+ np.savetxt(os.path.join(new_d, "freyberg6.disv_top.txt"), top, fmt="%15.6E")
+ mnsim = flopy.mf6.MFSimulation.load(sim_ws=mn_dir, load_only=["dis"])
mnm = mnsim.get_model()
mnm.modelgrid.set_coord_info(angrot=0.0)
- usim = flopy.mf6.MFSimulation.load(sim_ws=new_d)#,load_only=["disv"])
+ usim = flopy.mf6.MFSimulation.load(sim_ws=new_d) # ,load_only=["disv"])
um = usim.get_model()
um.modelgrid.set_coord_info(angrot=0.0)
- print(um.modelgrid,mnm.modelgrid)
-
+ print(um.modelgrid, mnm.modelgrid)
+
hobs_csv = "freyberg6.obs_continuous_heads.csv.txt"
- hds = pd.read_csv(os.path.join(mn_dir,hobs_csv),delim_whitespace=True,header=None,names=["site","otype","layer","row","col"])
- X,Y = mnm.modelgrid.xcellcenters,mnm.modelgrid.ycellcenters
- hds.loc[:,"x"] = hds.apply(lambda x: X[x.row-1,x.col-1],axis=1)
- hds.loc[:,"y"] = hds.apply(lambda x: Y[x.row-1,x.col-1],axis=1)
- hds.loc[:,"pt"] = hds.apply(lambda x: shapely.geometry.Point(x.x,x.y),axis=1)
+ hds = pd.read_csv(
+ os.path.join(mn_dir, hobs_csv),
+ delim_whitespace=True,
+ header=None,
+ names=["site", "otype", "layer", "row", "col"],
+ )
+ X, Y = mnm.modelgrid.xcellcenters, mnm.modelgrid.ycellcenters
+ hds.loc[:, "x"] = hds.apply(lambda x: X[x.row - 1, x.col - 1], axis=1)
+ hds.loc[:, "y"] = hds.apply(lambda x: Y[x.row - 1, x.col - 1], axis=1)
+ hds.loc[:, "pt"] = hds.apply(lambda x: shapely.geometry.Point(x.x, x.y), axis=1)
i = flopy.utils.GridIntersect(um.modelgrid)
- #print(i.intersect(shapely.geometry.Point(X[0,0],Y[0,0])))
- hds.loc[:,"inode"] = hds.pt.apply(lambda x: i.intersect(x)[0][0])
+ # print(i.intersect(shapely.geometry.Point(X[0,0],Y[0,0])))
+ hds.loc[:, "inode"] = hds.pt.apply(lambda x: i.intersect(x)[0][0])
print(hds.inode)
- hds.loc[:,"site"] = hds.apply(lambda x: "twgw_{0}_{1}".format(x.layer-1,x.inode),axis=1)
- hds.loc[:,["site","otype","layer","inode"]].to_csv(os.path.join(new_d,hobs_csv),index=False,header=False)
- shutil.copy2(os.path.join(mn_dir,"freyberg6.obs"),os.path.join(new_d,"freyberg6.obs"))
- lines = open(os.path.join(new_d,"freyberg6.nam"),'r').readlines()
- with open(os.path.join(new_d,"freyberg6.nam"),'w') as f:
+ hds.loc[:, "site"] = hds.apply(
+ lambda x: "twgw_{0}_{1}".format(x.layer - 1, x.inode), axis=1
+ )
+ hds.loc[:, ["site", "otype", "layer", "inode"]].to_csv(
+ os.path.join(new_d, hobs_csv), index=False, header=False
+ )
+ shutil.copy2(
+ os.path.join(mn_dir, "freyberg6.obs"), os.path.join(new_d, "freyberg6.obs")
+ )
+ lines = open(os.path.join(new_d, "freyberg6.nam"), "r").readlines()
+ with open(os.path.join(new_d, "freyberg6.nam"), "w") as f:
for line in lines:
-
if "end packages" in line.lower():
f.write("obs6 freyberg6.obs head_obs\n")
f.write("sto6 freyberg6.sto\n")
f.write(line)
- sfr_file = os.path.join(new_d,"freyberg6.sfr_packagedata.txt")
- df = pd.read_csv(sfr_file,delim_whitespace=True,header=None)
- df.loc[:,1] = 1 # reset layer to 1
- #botm = np.loadtxt(os.path.join(new_d,"freyberg6.disv_botm_layer1.txt"))
- df.loc[:,5] = 5e-5 #gradient
- df.loc[:,4] = 5 # width
- cumdist = np.cumsum(df.loc[:,4].values)
+ sfr_file = os.path.join(new_d, "freyberg6.sfr_packagedata.txt")
+ df = pd.read_csv(sfr_file, delim_whitespace=True, header=None)
+ df.loc[:, 1] = 1 # reset layer to 1
+ # botm = np.loadtxt(os.path.join(new_d,"freyberg6.disv_botm_layer1.txt"))
+ df.loc[:, 5] = 5e-5 # gradient
+ df.loc[:, 4] = 5 # width
+ cumdist = np.cumsum(df.loc[:, 4].values)
up = 34.0
dn = 33.501
totdrop = up - dn
totlen = cumdist[-1]
- drop_grad = totdrop/totlen
+ drop_grad = totdrop / totlen
drop_reach = up - (cumdist * drop_grad)
print(drop_reach)
- df.loc[:,6] = drop_reach #reach botm elev
- df.loc[:,7] = 1.0 #thickness of stream bed
- df.loc[:,8] = 0.1 #stream hk
- df.loc[:,9] = 0.3 #mannings
+ df.loc[:, 6] = drop_reach # reach botm elev
+ df.loc[:, 7] = 1.0 # thickness of stream bed
+ df.loc[:, 8] = 0.1 # stream hk
+ df.loc[:, 9] = 0.3 # mannings
print(df)
- df.to_csv(os.path.join(new_d,"freyberg6.sfr_packagedata.txt"),index=False,header=False,sep=" ")
+ df.to_csv(
+ os.path.join(new_d, "freyberg6.sfr_packagedata.txt"),
+ index=False,
+ header=False,
+ sep=" ",
+ )
- df = pd.read_csv(os.path.join(new_d,"freyberg6.chd_stress_period_data_1.txt"),header=None,delim_whitespace=True)
- df.loc[:,2] = 33.5
- df.to_csv(os.path.join(new_d,"freyberg6.chd_stress_period_data_1.txt"),index=False,header=False)
+ df = pd.read_csv(
+ os.path.join(new_d, "freyberg6.chd_stress_period_data_1.txt"),
+ header=None,
+ delim_whitespace=True,
+ )
+ df.loc[:, 2] = 33.5
+ df.to_csv(
+ os.path.join(new_d, "freyberg6.chd_stress_period_data_1.txt"),
+ index=False,
+ header=False,
+ )
for k in range(3):
- id_name = os.path.join(new_d,"freyberg6.disv_idomain_layer{0}.txt".format(k+1))
+ id_name = os.path.join(
+ new_d, "freyberg6.disv_idomain_layer{0}.txt".format(k + 1)
+ )
arr = np.loadtxt(id_name)
- arr[arr>0] = 1
- np.savetxt(id_name,arr,fmt="%2.0f")
-
+ arr[arr > 0] = 1
+ np.savetxt(id_name, arr, fmt="%2.0f")
- pyemu.os_utils.run("mf6",cwd=new_d)
+ pyemu.os_utils.run("mf6", cwd=new_d)
- #now write an "obs" twgw file with xy layer info
+ # now write an "obs" twgw file with xy layer info
mnm.modelgrid.set_coord_info(angrot=-55)
- X,Y = mnm.modelgrid.xcellcenters,mnm.modelgrid.ycellcenters
- hds.loc[:,"x"] = hds.apply(lambda x: X[x.row-1,x.col-1],axis=1)
- hds.loc[:,"y"] = hds.apply(lambda x: Y[x.row-1,x.col-1],axis=1)
+ X, Y = mnm.modelgrid.xcellcenters, mnm.modelgrid.ycellcenters
+ hds.loc[:, "x"] = hds.apply(lambda x: X[x.row - 1, x.col - 1], axis=1)
+ hds.loc[:, "y"] = hds.apply(lambda x: Y[x.row - 1, x.col - 1], axis=1)
- obsdf = pd.read_csv(os.path.join(new_d,"heads.csv"),index_col=0)
+ obsdf = pd.read_csv(os.path.join(new_d, "heads.csv"), index_col=0)
obsdf.columns = [c.lower() for c in obsdf.columns]
- obsdf = obsdf.melt(ignore_index=False,var_name="site",value_name="obsval")
- start_datetime= pd.to_datetime("1-1-2018")
- obsdf.index = start_datetime + pd.to_timedelta(obsdf.index.values,unit="d")
+ obsdf = obsdf.melt(ignore_index=False, var_name="site", value_name="obsval")
+ start_datetime = pd.to_datetime("1-1-2018")
+ obsdf.index = start_datetime + pd.to_timedelta(obsdf.index.values, unit="d")
obsdf.index.name = "datetime"
hds.index = hds.site.values
- noisex = np.random.uniform(-40,40,hds.x.shape[0])
+ noisex = np.random.uniform(-40, 40, hds.x.shape[0])
xdict = (hds.x + noisex).to_dict()
- noisey = np.random.uniform(-40,40,hds.y.shape[0])
+ noisey = np.random.uniform(-40, 40, hds.y.shape[0])
ydict = (hds.y + noisey).to_dict()
layerdict = hds.layer.to_dict()
- obsdf.loc[:,"x"] = obsdf.site.apply(lambda x: xdict[x])
- obsdf.loc[:,"y"] = obsdf.site.apply(lambda x: ydict[x])
- obsdf.loc[:,"layer"] = obsdf.site.apply(lambda x: layerdict[x])
- #now remove some values
- keep_idx = np.arange(0,obsdf.shape[0],dtype=int)
+ obsdf.loc[:, "x"] = obsdf.site.apply(lambda x: xdict[x])
+ obsdf.loc[:, "y"] = obsdf.site.apply(lambda x: ydict[x])
+ obsdf.loc[:, "layer"] = obsdf.site.apply(lambda x: layerdict[x])
+ # now remove some values
+ keep_idx = np.arange(0, obsdf.shape[0], dtype=int)
np.random.shuffle(keep_idx)
- keep_idx = keep_idx[:int(3*keep_idx.shape[0]/4)]
- kobsdf = obsdf.iloc[keep_idx,:].copy()
+ keep_idx = keep_idx[: int(3 * keep_idx.shape[0] / 4)]
+ kobsdf = obsdf.iloc[keep_idx, :].copy()
kobsdf.sort_index(inplace=True)
- print(obsdf.shape,kobsdf.shape)
+ print(obsdf.shape, kobsdf.shape)
print(kobsdf)
# now some datetime noise
- noise = np.random.uniform(-30,30,kobsdf.shape[0])
- noise = np.round(noise,1)
- td_noise = pd.to_timedelta(noise,unit='d')
+ noise = np.random.uniform(-30, 30, kobsdf.shape[0])
+ noise = np.round(noise, 1)
+ td_noise = pd.to_timedelta(noise, unit="d")
kobsdf.index = kobsdf.index + td_noise
- kobsdf.loc[:,"datetime"] = kobsdf.index.values
- kobsdf.loc[:,["site","datetime","x","y","layer","obsval"]].to_csv(os.path.join("freyberg_aux_files","gwlevel_obs.csv"),index=False)
+ kobsdf.loc[:, "datetime"] = kobsdf.index.values
+ kobsdf.loc[:, ["site", "datetime", "x", "y", "layer", "obsval"]].to_csv(
+ os.path.join("freyberg_aux_files", "gwlevel_obs.csv"), index=False
+ )
print(kobsdf)
+
def invest_quadtree_results():
import matplotlib.pyplot as plt
+
ws = "freyberg_quadtree"
sim = flopy.mf6.MFSimulation.load(sim_ws=ws)
m = sim.get_model()
- hds = flopy.utils.HeadFile(os.path.join(ws,"freyberg6.hds"),model=m)
+ hds = flopy.utils.HeadFile(os.path.join(ws, "freyberg6.hds"), model=m)
hds.plot(colorbar=True)
plt.show()
+
if __name__ == "__main__":
repair_and_prep_quadtree_model()
- #invest_quadtree_results()
+ # invest_quadtree_results()
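The obs-mapping block in repair_and_prep_quadtree_model above hinges on flopy's GridIntersect. A stripped-down sketch of that step; the workspace path and the point coordinate are placeholders:

import flopy
import shapely.geometry

usim = flopy.mf6.MFSimulation.load(sim_ws="freyberg_quadtree")  # placeholder workspace
um = usim.get_model()
gi = flopy.utils.GridIntersect(um.modelgrid)
pt = shapely.geometry.Point(101.0, 221.0)  # any real-world coordinate on the grid
inode = gi.intersect(pt)[0][0]  # cellid of the unstructured cell containing the point
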
diff --git a/pypestutils/ctypes_declarations.py b/pypestutils/ctypes_declarations.py
index 1d7cf46..0f0c98a 100644
--- a/pypestutils/ctypes_declarations.py
+++ b/pypestutils/ctypes_declarations.py
@@ -1,4 +1,5 @@
"""Low-level Fortran-Python ctypes functions."""
+
from __future__ import annotations
from ctypes import ARRAY, CDLL, POINTER, c_char, c_double, c_int
diff --git a/pypestutils/data.py b/pypestutils/data.py
index 0521e97..374f4bb 100644
--- a/pypestutils/data.py
+++ b/pypestutils/data.py
@@ -1,4 +1,5 @@
"""Data module."""
+
from __future__ import annotations
from enum import Enum
@@ -157,8 +158,7 @@ def __init__(
if name in self._names:
raise KeyError(f"'{name}' defined more than once")
self._names.append(name)
- float_any[name] = ar = np.asarray(
- float_any[name], np.float64, order="F")
+ float_any[name] = ar = np.asarray(float_any[name], np.float64, order="F")
if not self.shape and ar.ndim == 1:
self.shape = ar.shape
for name in int_any.keys():
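The collapsed call above coerces user input to float64 in Fortran (column-major) order before it is handed to the shared library; a tiny demonstration using only numpy:

import numpy as np

a = np.array([[1, 2], [3, 4]], dtype=np.int32)
ar = np.asarray(a, np.float64, order="F")
print(ar.dtype, ar.flags["F_CONTIGUOUS"])  # float64 True
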
diff --git a/pypestutils/enum.py b/pypestutils/enum.py
index 939cf81..2303ff1 100644
--- a/pypestutils/enum.py
+++ b/pypestutils/enum.py
@@ -1,4 +1,5 @@
"""Enumeration module."""
+
from __future__ import annotations
from enum import IntEnum
diff --git a/pypestutils/finder.py b/pypestutils/finder.py
index a00dae6..b80353e 100644
--- a/pypestutils/finder.py
+++ b/pypestutils/finder.py
@@ -1,4 +1,5 @@
"""Locate pestutils shared library by any means necessary."""
+
import ctypes
import os
import platform
diff --git a/pypestutils/helpers.py b/pypestutils/helpers.py
index 181b4e3..8335bc4 100644
--- a/pypestutils/helpers.py
+++ b/pypestutils/helpers.py
@@ -7,10 +7,19 @@
from .pestutilslib import PestUtilsLib
-def mod2obs_mf6(gridinfo_fname: str,depvar_fname: str,obscsv_fname: str ,model_type: int,start_datetime: str | pd.TimeStamp,depvar_ftype=1,
- depvar_name="head",interp_thresh=1.0e+30,no_interp_val=1.0e+30,model_timeunit="d",
- time_extrap=1.0)->dict:
-
+def mod2obs_mf6(
+ gridinfo_fname: str,
+ depvar_fname: str,
+ obscsv_fname: str,
+ model_type: int,
+    start_datetime: str | pd.Timestamp,
+ depvar_ftype=1,
+ depvar_name="head",
+ interp_thresh=1.0e30,
+ no_interp_val=1.0e30,
+ model_timeunit="d",
+ time_extrap=1.0,
+) -> dict:
"""python implementation of mod2smp and mod2obs using modflow6 binary grid files
Parameters
----------
@@ -45,8 +54,8 @@ def mod2obs_mf6(gridinfo_fname: str,depvar_fname: str,obscsv_fname: str ,model_t
temporally interpolated simulated results at observation locations (i.e., mod2obs)
"""
- for fname in [gridinfo_fname,depvar_fname]:
- assert os.path.exists(fname),"file {0} not found".format(fname)
+ for fname in [gridinfo_fname, depvar_fname]:
+ assert os.path.exists(fname), "file {0} not found".format(fname)
lib = PestUtilsLib()
is_mf6 = False
is_structured = True
@@ -69,68 +78,109 @@ def mod2obs_mf6(gridinfo_fname: str,depvar_fname: str,obscsv_fname: str ,model_t
raise Exception("unrecognized 'model_type':{0}".format(model_type))
depvar_ftype = int(depvar_ftype)
- if depvar_ftype not in [1,2]:
+ if depvar_ftype not in [1, 2]:
raise Exception("unrecognized 'depvar_ftype':{0}".format(depvar_ftype))
if is_mf6:
- grid_info = lib.install_mf6_grid_from_file("grid",gridinfo_fname)
+ grid_info = lib.install_mf6_grid_from_file("grid", gridinfo_fname)
else:
raise NotImplementedError()
-
- if isinstance(start_datetime,str):
+
+ if isinstance(start_datetime, str):
start_datetime = pd.to_datetime(start_datetime)
- depvar_info = lib.inquire_modflow_binary_file_specs(depvar_fname,depvar_fname+".out.csv",model_type,depvar_ftype)
- depvar_df = pd.read_csv(depvar_fname+".out.csv")
+ depvar_info = lib.inquire_modflow_binary_file_specs(
+ depvar_fname, depvar_fname + ".out.csv", model_type, depvar_ftype
+ )
+ depvar_df = pd.read_csv(depvar_fname + ".out.csv")
depvar_df.columns = [c.lower() for c in depvar_df.columns]
- #print(depvar_df)
+ # print(depvar_df)
- if isinstance(obscsv_fname,str):
+ if isinstance(obscsv_fname, str):
if not os.path.exists(obscsv_fname):
raise Exception("obscsv_fname '{0}' not found".format(obscsv_fname))
# todo: think about supporting a site sample file maybe?
- obsdf = pd.read_csv(os.path.join(obscsv_fname),parse_dates=["datetime"])
- elif isinstance(obscsv_fname,pd.DataFrame):
+ obsdf = pd.read_csv(os.path.join(obscsv_fname), parse_dates=["datetime"])
+ elif isinstance(obscsv_fname, pd.DataFrame):
obsdf = obscsv_fname.copy()
else:
- raise Exception("obscsv arg type not recognized (looking for str or pd.DataFrame):'{0}'".format(type(obscsv_fname)))
- #check obsdf
+ raise Exception(
+ "obscsv arg type not recognized (looking for str or pd.DataFrame):'{0}'".format(
+ type(obscsv_fname)
+ )
+ )
+ # check obsdf
obsdf.columns = [c.lower() for c in obsdf.columns]
- for req_col in ["site","x","y","datetime","layer"]:
+ for req_col in ["site", "x", "y", "datetime", "layer"]:
if req_col not in obsdf.columns:
- raise Exception("observation dataframe missing column '{0}'".format(req_col))
+ raise Exception(
+ "observation dataframe missing column '{0}'".format(req_col)
+ )
usitedf = obsdf.groupby("site").first()
pth = os.path.split(depvar_fname)[0]
- fac_file = os.path.join(pth,"obs_interp_fac.bin")
- bln_file = fac_file.replace(".bin",".bln")
- interp_fac_results = lib.calc_mf6_interp_factors("grid",usitedf.x.values,usitedf.y.values,usitedf.layer.values,fac_file,"binary",bln_file)
+ fac_file = os.path.join(pth, "obs_interp_fac.bin")
+ bln_file = fac_file.replace(".bin", ".bln")
+ interp_fac_results = lib.calc_mf6_interp_factors(
+ "grid",
+ usitedf.x.values,
+ usitedf.y.values,
+ usitedf.layer.values,
+ fac_file,
+ "binary",
+ bln_file,
+ )
if 0 in interp_fac_results:
- print("warning: the following site(s) failed to have interpolation factors calculated:")
- fsites = usitedf.reset_index().site.iloc[interp_fac_results==0].to_list()
+ print(
+ "warning: the following site(s) failed to have interpolation factors calculated:"
+ )
+ fsites = usitedf.reset_index().site.iloc[interp_fac_results == 0].to_list()
print(fsites)
- all_results = lib.interp_from_mf6_depvar_file(depvar_fname,fac_file,"binary",depvar_info["ntime"],depvar_name,interp_thresh,True,
- no_interp_val,usitedf.shape[0])
- datetimes = start_datetime+pd.to_timedelta(all_results["simtime"],unit=model_timeunit)
- allresults_df = pd.DataFrame(all_results["simstate"],index=datetimes,columns=usitedf.index)
- allresults_df.to_csv(depvar_fname+".all.csv")
+ all_results = lib.interp_from_mf6_depvar_file(
+ depvar_fname,
+ fac_file,
+ "binary",
+ depvar_info["ntime"],
+ depvar_name,
+ interp_thresh,
+ True,
+ no_interp_val,
+ usitedf.shape[0],
+ )
+ datetimes = start_datetime + pd.to_timedelta(
+ all_results["simtime"], unit=model_timeunit
+ )
+ allresults_df = pd.DataFrame(
+ all_results["simstate"], index=datetimes, columns=usitedf.index
+ )
+ allresults_df.to_csv(depvar_fname + ".all.csv")
if "totim" in obsdf:
print("WARNING: replacing existing 'totim' column in observation dataframe")
- obsdf.loc[:,"totim"] = obsdf.datetime.apply(lambda x: x - start_datetime).dt.days
+ obsdf.loc[:, "totim"] = obsdf.datetime.apply(lambda x: x - start_datetime).dt.days
usite = obsdf.site.unique()
usite.sort()
- usite_dict = {s:c for s,c in zip(usite,np.arange(usite.shape[0],dtype=int))}
- obsdf.loc[:,"isite"] = obsdf.site.apply(lambda x: usite_dict[x])
- obsdf.sort_values(by=["isite","totim"],inplace=True)
-
- interp_results = lib.interp_to_obstime(all_results["nproctime"],all_results["simtime"],all_results["simstate"],interp_thresh,"L",
- time_extrap,no_interp_val,obsdf.isite.values,obsdf.totim.values)
-
- obsdf.loc[:,"simulated"] = interp_results
- lib.uninstall_mf6_grid('grid')
+ usite_dict = {s: c for s, c in zip(usite, np.arange(usite.shape[0], dtype=int))}
+ obsdf.loc[:, "isite"] = obsdf.site.apply(lambda x: usite_dict[x])
+ obsdf.sort_values(by=["isite", "totim"], inplace=True)
+
+ interp_results = lib.interp_to_obstime(
+ all_results["nproctime"],
+ all_results["simtime"],
+ all_results["simstate"],
+ interp_thresh,
+ "L",
+ time_extrap,
+ no_interp_val,
+ obsdf.isite.values,
+ obsdf.totim.values,
+ )
+
+ obsdf.loc[:, "simulated"] = interp_results
+ lib.uninstall_mf6_grid("grid")
lib.free_all_memory()
- return {"all_results":allresults_df,"interpolated_results":obsdf}
+ return {"all_results": allresults_df, "interpolated_results": obsdf}
+
def get_grid_info_from_gridspec(gridspec_fname: str) -> dict:
"""Read structured grid info from a PEST-style grid specification file
@@ -138,7 +188,7 @@ def get_grid_info_from_gridspec(gridspec_fname: str) -> dict:
----------
gridspec_fname : str
PEST-style grid specification file
-
+
Returns
-------
grid_info: dict
@@ -155,7 +205,7 @@ def get_grid_info_from_gridspec(gridspec_fname: str) -> dict:
"nrow": sr.nrow,
"ncol": sr.ncol,
"delr": sr.delr,
- "delc": sr.delc
+ "delc": sr.delc,
}
@@ -165,7 +215,7 @@ def get_grid_info_from_mf6_grb(grb_fname: str) -> dict:
----------
grb_fname: str
MODFLOW-6 binary grid file
-
+
Returns
-------
grid_info: dict
@@ -174,8 +224,8 @@ def get_grid_info_from_mf6_grb(grb_fname: str) -> dict:
if not os.path.exists(grb_fname):
raise FileNotFoundError(grb_fname)
lib = PestUtilsLib()
- data = lib.install_mf6_grid_from_file("grid",grb_fname)
- data["x"],data["y"],data["z"] = lib.get_cell_centres_mf6("grid",data["ncells"])
+ data = lib.install_mf6_grid_from_file("grid", grb_fname)
+ data["x"], data["y"], data["z"] = lib.get_cell_centres_mf6("grid", data["ncells"])
lib.uninstall_mf6_grid("grid")
lib.free_all_memory()
return data
@@ -191,22 +241,22 @@ def get_2d_grid_info_from_file(fname: str, layer=None) -> dict:
layer: int (optional)
the (one-based) layer number to use for 2-D. If None and
grid info is 3-D, a value of 1 is used
-
+
Returns
-------
grid_info: dict
grid information
- """
+ """
grid_info = None
- if isinstance(fname,str):
+ if isinstance(fname, str):
if not os.path.exists(fname):
raise FileNotFoundError(fname)
if fname.lower().endswith(".csv"):
grid_info = pd.read_csv(fname)
grid_info.columns = [c.lower() for c in grid_info.columns]
- fname = grid_info # for checks and processing below
-
+ fname = grid_info # for checks and processing below
+
else:
try:
grid_info = get_grid_info_from_gridspec(fname)
@@ -214,23 +264,26 @@ def get_2d_grid_info_from_file(fname: str, layer=None) -> dict:
try:
grid_info = get_2d_grid_info_from_mf6_grb(fname, layer=layer)
except Exception as e2:
-
- raise Exception("error getting grid info from file '{0}'".format(fname))
-
- if isinstance(fname,pd.DataFrame):
- if 'x' not in fname.columns:
+ raise Exception(
+ "error getting grid info from file '{0}'".format(fname)
+ )
+
+ if isinstance(fname, pd.DataFrame):
+ if "x" not in fname.columns:
raise Exception("required 'x' column not found in grid info dataframe")
- if 'y' not in fname.columns:
+ if "y" not in fname.columns:
raise Exception("required 'y' column not found in grid info dataframe")
- if layer is not None and 'layer' not in fname.columns:
- print("WARNING: 'layer' arg is not None but 'layer' not found in grid info dataframe...")
+ if layer is not None and "layer" not in fname.columns:
+ print(
+ "WARNING: 'layer' arg is not None but 'layer' not found in grid info dataframe..."
+ )
# I think these should just be references to column values (not copies)
- grid_info = {c:fname[c].values for c in fname.columns}
-
+ grid_info = {c: fname[c].values for c in fname.columns}
+
return grid_info
-def get_2d_grid_info_from_mf6_grb(grb_fname: str,layer=None) -> dict:
+def get_2d_grid_info_from_mf6_grb(grb_fname: str, layer=None) -> dict:
"""Read grid info from a MODFLOW-6 binary grid file
Parameters
----------
@@ -239,7 +292,7 @@ def get_2d_grid_info_from_mf6_grb(grb_fname: str,layer=None) -> dict:
layer: int (optional)
the layer number to use for 2-D. If None,
a value of 1 is used
-
+
Returns
-------
grid_info: dict
@@ -249,19 +302,23 @@ def get_2d_grid_info_from_mf6_grb(grb_fname: str,layer=None) -> dict:
nnodes = grid_info["ncells"]
x = grid_info["x"].copy()
y = grid_info["y"].copy()
- nrow,ncol = None,None
+ nrow, ncol = None, None
if grid_info["idis"] == 1:
nlay = grid_info["ndim3"]
if layer is not None:
assert layer != 0, "Value provided for layer should be 1 based, sorry"
if layer > nlay:
- raise Exception("user-supplied 'layer' {0} greater than nlay {1}".format(layer,nlay))
+ raise Exception(
+ "user-supplied 'layer' {0} greater than nlay {1}".format(
+ layer, nlay
+ )
+ )
else:
layer = 1
nrow = grid_info["ndim2"]
ncol = grid_info["ndim1"]
- x = x.reshape((nlay,nrow,ncol))[layer-1]
- y = y.reshape((nlay,nrow,ncol))[layer-1]
+ x = x.reshape((nlay, nrow, ncol))[layer - 1]
+ y = y.reshape((nlay, nrow, ncol))[layer - 1]
grid_info["nnodes"] = nrow * ncol
grid_info["x"] = x
grid_info["y"] = y
@@ -272,12 +329,16 @@ def get_2d_grid_info_from_mf6_grb(grb_fname: str,layer=None) -> dict:
nlay = grid_info["ndim3"]
if layer is not None:
if layer > nlay:
- raise Exception("user-supplied 'layer' {0} greater than nlay {1}".format(layer,nlay))
+ raise Exception(
+ "user-supplied 'layer' {0} greater than nlay {1}".format(
+ layer, nlay
+ )
+ )
else:
layer = 1
ncpl = grid_info["ndim1"]
- x = x.reshape((nlay,ncpl))[layer-1]
- y = y.reshape((nlay,ncpl))[layer-1]
+ x = x.reshape((nlay, ncpl))[layer - 1]
+ y = y.reshape((nlay, ncpl))[layer - 1]
grid_info["nnodes"] = ncpl
grid_info["x"] = x
grid_info["y"] = y
@@ -285,12 +346,9 @@ def get_2d_grid_info_from_mf6_grb(grb_fname: str,layer=None) -> dict:
def get_2d_pp_info_structured_grid(
- pp_space: int,
- gridinfo_fname: str,
- array_dict = {},
- name_prefix="pp"
+ pp_space: int, gridinfo_fname: str, array_dict={}, name_prefix="pp"
) -> pd.DataFrame:
- """Create a grid of pilot point locations for a
+ """Create a grid of pilot point locations for a
2-D structured grid
Parameters
----------
@@ -299,14 +357,14 @@ def get_2d_pp_info_structured_grid(
gridinfo_fname: str
file contain grid information
array_dict: dict (optional)
- a dict of 2-D grid-shape arrays used to populate
+ a dict of 2-D grid-shape arrays used to populate
pilot point attributes. Special values include:
- "value","zone","bearing","aniso" and "corrlen",
+ "value","zone","bearing","aniso" and "corrlen",
although any number of arrays can be passed and will be
sampled at pilot point locations
name_prefix: str
pilot point name prefix. Default is "pp"
-
+
Returns
-------
ppdf: pd.DataFrame
@@ -317,29 +375,31 @@ def get_2d_pp_info_structured_grid(
grid_info = get_2d_grid_info_from_file(gridinfo_fname)
pname, px, py, pval = [], [], [], []
pi, pj = [], []
- parr_dict = {k:[] for k in array_dict.keys()}
+ parr_dict = {k: [] for k in array_dict.keys()}
count = 0
nrow = grid_info["nrow"]
ncol = grid_info["ncol"]
- nlay = grid_info.get("nlay",1)
+ nlay = grid_info.get("nlay", 1)
- zone_array = array_dict.get("zone",None)
+ zone_array = array_dict.get("zone", None)
- x = grid_info['x']
- y = grid_info['y']
- x = x.reshape((nlay,nrow,ncol))[0,:,:]
- y = y.reshape((nlay,nrow,ncol))[0,:,:]
+ x = grid_info["x"]
+ y = grid_info["y"]
+ x = x.reshape((nlay, nrow, ncol))[0, :, :]
+ y = y.reshape((nlay, nrow, ncol))[0, :, :]
if nrow is None:
- raise Exception("unstructured grid loaded from gridinfo_fname '{0}'".format(gridspec_fname))
+ raise Exception(
+ "unstructured grid loaded from gridinfo_fname '{0}'".format(gridspec_fname)
+ )
for i in range(int(pp_space / 2), nrow, pp_space):
for j in range(int(pp_space / 2), ncol, pp_space):
if zone_array is not None and zone_array[i, j] <= 0:
continue
px.append(x[i, j])
py.append(y[i, j])
- #if zone_array is not None:
+ # if zone_array is not None:
# pzone.append(zone_array[i, j])
- #else:
+ # else:
# pzone.append(1)
pname.append(name_prefix + "{0}".format(count))
@@ -356,15 +416,15 @@ def get_2d_pp_info_structured_grid(
},
index=pname,
)
- df.loc[:,"value"] = 1.0
+ df.loc[:, "value"] = 1.0
df.loc[:, "bearing"] = 0.0
df.loc[:, "aniso"] = 1.0
delx = pp_space * 5 * int((x.max() - x.min()) / float(ncol))
dely = pp_space * 5 * int((y.max() - y.min()) / float(nrow))
- df.loc[:, "corrlen"] = max(delx,dely) # ?
- df.loc[:,"zone"] = 1
- for k,arr in array_dict.items():
- df.loc[:,k] = arr[df.i,df.j]
+ df.loc[:, "corrlen"] = max(delx, dely) # ?
+ df.loc[:, "zone"] = 1
+ for k, arr in array_dict.items():
+ df.loc[:, k] = arr[df.i, df.j]
df["zone"] = df.zone.astype(int)
return df
@@ -381,16 +441,16 @@ def interpolate_with_sva_pilotpoints_2d(
search_dist=1e30,
zone_array=1,
verbose=True,
- layer=None
+ layer=None,
) -> dict:
"""Perform 2-D pilot point interpolation using
spatially varying geostatistical hyper-parameters
Parameters
----------
pp_info: pandas.DataFrame
- dataframe with pilot point info. Required columns
+ dataframe with pilot point info. Required columns
include: "x","y",and "value". optional columns include:
- "zone","bearing","aniso",and "corrlen"
+ "zone","bearing","aniso",and "corrlen"
gridinfo_fname: str
file name storing grid information
vartype: str
@@ -409,7 +469,7 @@ def interpolate_with_sva_pilotpoints_2d(
search distance to use when looking for nearby pilot points.
Default is 1.0e+30
zone_array: int | numpy.ndarray
- the zone array to match up with "zone" value in `pp_info`. If
+ the zone array to match up with "zone" value in `pp_info`. If
integer type, a constant zone array of value "zone_array" is used.
Default is 1
verbose: bool
@@ -422,7 +482,7 @@ def interpolate_with_sva_pilotpoints_2d(
Returns
-------
results: dict
- resulting arrays of the various interpolation from pilot
+        resulting arrays of the various interpolations from pilot
points to grid-shaped arrays
"""
# some checks on pp_info
@@ -447,13 +507,13 @@ def interpolate_with_sva_pilotpoints_2d(
nrow, ncol = None, None
x, y, area = None, None, None
grid_info = get_2d_grid_info_from_file(gridinfo_fname, layer)
- nrow = grid_info.get("nrow",None)
- ncol = grid_info.get("ncol",None)
- x = grid_info['x']
- y = grid_info['y']
- area = grid_info.get("area",None)
- idis = grid_info.get("idis",None)
- nnodes = grid_info.get("nnodes",None)
+ nrow = grid_info.get("nrow", None)
+ ncol = grid_info.get("ncol", None)
+ x = grid_info["x"]
+ y = grid_info["y"]
+ area = grid_info.get("area", None)
+ idis = grid_info.get("idis", None)
+ nnodes = grid_info.get("nnodes", None)
if area is None:
area = np.ones_like(x)
if nnodes is None:
@@ -468,8 +528,7 @@ def interpolate_with_sva_pilotpoints_2d(
# TODO warn here
lib.logger.info("casting zone_array from %r to int", zone_array.dtype)
zone_array = zone_array.astype(int)
-
-
+
hyperfac_ftype = "binary"
if verbose:
hyperfac_ftype = "text"
@@ -480,15 +539,26 @@ def interpolate_with_sva_pilotpoints_2d(
hypertrans = "none"
fac_files = []
- lib.logger.info("using bearing of %r and aniso of %r for hyperpar interpolation", hyperbearing, hyperaniso)
- lib.logger.info("using %r variogram with %r transform for hyperpar interpolation",hypervartype,hypertrans)
-
- results = {}
+ lib.logger.info(
+ "using bearing of %r and aniso of %r for hyperpar interpolation",
+ hyperbearing,
+ hyperaniso,
+ )
+ lib.logger.info(
+ "using %r variogram with %r transform for hyperpar interpolation",
+ hypervartype,
+ hypertrans,
+ )
+
+ results = {}
bearing = np.zeros_like(x)
if "bearing" in pp_info.columns:
hypernoint = pp_info.bearing.mean()
- lib.logger.info("using no-interpolation value of %r for 'bearing' hyperpar interpolation", hypernoint)
+ lib.logger.info(
+ "using no-interpolation value of %r for 'bearing' hyperpar interpolation",
+ hypernoint,
+ )
hyperfac_fname = "tempbearing.fac"
npts = lib.calc_kriging_factors_auto_2d(
pp_info.x.values,
@@ -517,17 +587,19 @@ def interpolate_with_sva_pilotpoints_2d(
fac_files.append(hyperfac_fname)
if verbose:
if nrow is not None:
- np.savetxt("bearing.txt",bearing.reshape(nrow,ncol),fmt="%15.6E")
+ np.savetxt("bearing.txt", bearing.reshape(nrow, ncol), fmt="%15.6E")
else:
- np.savetxt("bearing.txt",bearing,fmt="%15.6E")
+ np.savetxt("bearing.txt", bearing, fmt="%15.6E")
results["bearing"] = bearing
-
aniso = np.ones_like(x)
if "aniso" in pp_info.columns:
hypernoint = pp_info.aniso.mean()
- lib.logger.info("using no-interpolation value of %r for 'aniso' hyperpar interpolation", hypernoint)
-
+ lib.logger.info(
+ "using no-interpolation value of %r for 'aniso' hyperpar interpolation",
+ hypernoint,
+ )
+
hyperfac_fname = "tempaniso.fac"
npts = lib.calc_kriging_factors_auto_2d(
pp_info.x.values,
@@ -555,9 +627,9 @@ def interpolate_with_sva_pilotpoints_2d(
aniso = result["targval"]
if verbose:
if nrow is not None:
- np.savetxt("aniso.txt",aniso.reshape(nrow,ncol),fmt="%15.6E")
+ np.savetxt("aniso.txt", aniso.reshape(nrow, ncol), fmt="%15.6E")
else:
- np.savetxt("aniso.txt",aniso,fmt="%15.6E")
+ np.savetxt("aniso.txt", aniso, fmt="%15.6E")
results["aniso"] = aniso
@@ -565,7 +637,10 @@ def interpolate_with_sva_pilotpoints_2d(
corrlen = None # todo corrlen default where not in ppdf
if "corrlen" in pp_info.columns:
hypernoint = pp_info.corrlen.mean()
- lib.logger.info("using no-interpolation value of %r for 'corrlen' hyperpar interpolation", hypernoint)
+ lib.logger.info(
+ "using no-interpolation value of %r for 'corrlen' hyperpar interpolation",
+ hypernoint,
+ )
hyperfac_fname = "tempcorrlen.fac"
npts = lib.calc_kriging_factors_auto_2d(
pp_info.x.values,
@@ -595,9 +670,9 @@ def interpolate_with_sva_pilotpoints_2d(
use_auto = False
if verbose:
if nrow is not None:
- np.savetxt("corrlen.txt",corrlen.reshape(nrow,ncol),fmt="%15.6E")
+ np.savetxt("corrlen.txt", corrlen.reshape(nrow, ncol), fmt="%15.6E")
else:
- np.savetxt("corrlen.txt",corrlen,fmt="%15.6E")
+ np.savetxt("corrlen.txt", corrlen, fmt="%15.6E")
results["corrlen"] = corrlen
if not verbose:
@@ -660,8 +735,8 @@ def interpolate_with_sva_pilotpoints_2d(
results["result"] = result["targval"]
if nrow is not None:
- for k,v in results.items():
- results[k] = v.reshape(nrow,ncol)
+ for k, v in results.items():
+ results[k] = v.reshape(nrow, ncol)
return results
@@ -679,9 +754,9 @@ def generate_2d_grid_realizations(
variobearing=0.0,
random_seed=12345,
layer=None,
-) ->np.NDArray[float]:
- """draw 2-D realizations using sequential gaussian
- simulations and optionally using spatially varying
+) -> np.NDArray[float]:
+ """draw 2-D realizations using sequential gaussian
+ simulations and optionally using spatially varying
geostatistical hyper parameters.
Parameters
----------
@@ -718,24 +793,21 @@ def generate_2d_grid_realizations(
will be reshaped to NROW X NCOL)
"""
-
-
nrow, ncol = None, None
x, y, area = None, None, None
grid_info = get_2d_grid_info_from_file(gridinfo_fname, layer)
- nrow = grid_info.get("nrow",None)
- ncol = grid_info.get("ncol",None)
- x = grid_info['x']
- y = grid_info['y']
- area = grid_info.get("area",None)
- idis = grid_info.get("idis",None)
- nnodes = grid_info.get("nnodes",None)
+ nrow = grid_info.get("nrow", None)
+ ncol = grid_info.get("ncol", None)
+ x = grid_info["x"]
+ y = grid_info["y"]
+ area = grid_info.get("area", None)
+ idis = grid_info.get("idis", None)
+ nnodes = grid_info.get("nnodes", None)
if area is None:
area = np.ones_like(x)
if nnodes is None:
nnodes = x.shape[0]
-
if not isinstance(mean, np.ndarray):
mean = np.zeros((nnodes)) + mean
if not isinstance(variance, np.ndarray):
@@ -744,7 +816,7 @@ def generate_2d_grid_realizations(
delx = x.max() - x.min()
dely = y.max() - y.min()
- variorange = np.zeros((nnodes)) + max(delx,dely) / 10 # ?
+ variorange = np.zeros((nnodes)) + max(delx, dely) / 10 # ?
elif not isinstance(variorange, np.ndarray):
variorange = np.zeros((nnodes)) + variorange
@@ -760,8 +832,6 @@ def generate_2d_grid_realizations(
# TODO warn here
zone_array = zone_array.astype(int)
-
-
power = 1.0
lib = PestUtilsLib()
@@ -828,33 +898,29 @@ def __init__(self, delr, delc, xul, yul, rotation=0.0):
self._ycentergrid = None
@property
- def xll(self)->float:
- """lower left x coord
- """
+ def xll(self) -> float:
+ """lower left x coord"""
xll = self.xul - (np.sin(self.theta) * self.yedge[0])
return xll
@property
- def yll(self)->float:
- """lower left y coord
- """
+ def yll(self) -> float:
+ """lower left y coord"""
yll = self.yul - (np.cos(self.theta) * self.yedge[0])
return yll
@property
- def nrow(self)->int:
- """number of rows
- """
+ def nrow(self) -> int:
+ """number of rows"""
return self.delc.shape[0]
@property
- def ncol(self)->int:
- """number of cols
- """
+ def ncol(self) -> int:
+ """number of cols"""
return self.delr.shape[0]
@classmethod
- def from_gridspec(cls, gridspec_file)->SpatialReference:
+ def from_gridspec(cls, gridspec_file) -> SpatialReference:
"""instantiate from a pest-style grid specification file
Parameters
----------
@@ -902,71 +968,61 @@ def from_gridspec(cls, gridspec_file)->SpatialReference:
return cls(np.array(delr), np.array(delc), xul=xul, yul=yul, rotation=rot)
@property
- def theta(self)->float:
- """rotation in radians
- """
+ def theta(self) -> float:
+ """rotation in radians"""
return -self.rotation * np.pi / 180.0
@property
- def xedge(self)->np.NDArray[float]:
- """the xedge array of the grid
- """
+ def xedge(self) -> np.NDArray[float]:
+ """the xedge array of the grid"""
return self.get_xedge_array()
@property
- def yedge(self)->np.NDArray[float]:
- """the yedge array of the grid
- """
+ def yedge(self) -> np.NDArray[float]:
+ """the yedge array of the grid"""
return self.get_yedge_array()
@property
- def xgrid(self)->np.NDArray[float]:
- """xgrid array
- """
+ def xgrid(self) -> np.NDArray[float]:
+ """xgrid array"""
if self._xgrid is None:
self._set_xygrid()
return self._xgrid
@property
- def ygrid(self)->np.NDArray[float]:
- """ygrid array
- """
+ def ygrid(self) -> np.NDArray[float]:
+ """ygrid array"""
if self._ygrid is None:
self._set_xygrid()
return self._ygrid
@property
- def xcenter(self)->np.NDArray[float]:
- """grid x center array
- """
+ def xcenter(self) -> np.NDArray[float]:
+ """grid x center array"""
return self.get_xcenter_array()
@property
- def ycenter(self)->np.NDArray[float]:
- """grid y center array
- """
+ def ycenter(self) -> np.NDArray[float]:
+ """grid y center array"""
return self.get_ycenter_array()
@property
- def ycentergrid(self)->np.NDArray[float]:
- """grid y center array
- """
+ def ycentergrid(self) -> np.NDArray[float]:
+ """grid y center array"""
if self._ycentergrid is None:
self._set_xycentergrid()
return self._ycentergrid
@property
- def xcentergrid(self)->np.NDArray[float]:
- """grid x center array
- """
+ def xcentergrid(self) -> np.NDArray[float]:
+ """grid x center array"""
if self._xcentergrid is None:
self._set_xycentergrid()
return self._xcentergrid
@property
- def areagrid(self)->np.NDArray[float]:
- """area of grid nodes
- """
+ def areagrid(self) -> np.NDArray[float]:
+ """area of grid nodes"""
dr, dc = np.meshgrid(self.delr, self.delc)
return dr * dc
@@ -980,7 +1036,7 @@ def _set_xygrid(self):
self._xgrid, self._ygrid = np.meshgrid(self.xedge, self.yedge)
self._xgrid, self._ygrid = self.transform(self._xgrid, self._ygrid)
- def get_xedge_array(self)->np.NDArray[float]:
+ def get_xedge_array(self) -> np.NDArray[float]:
"""
a numpy one-dimensional float array that has the cell edge x
coordinates for every column in the grid in model space - not offset
@@ -993,7 +1049,7 @@ def get_xedge_array(self)->np.NDArray[float]:
xedge = np.concatenate(([0.0], np.add.accumulate(self.delr)))
return xedge
- def get_yedge_array(self)->np.NDArray[float]:
+ def get_yedge_array(self) -> np.NDArray[float]:
"""
a numpy one-dimensional float array that has the cell edge y
coordinates for every row in the grid in model space - not offset or
@@ -1007,7 +1063,7 @@ def get_yedge_array(self)->np.NDArray[float]:
yedge = np.concatenate(([length_y], length_y - np.add.accumulate(self.delc)))
return yedge
- def get_xcenter_array(self)->np.NDArray[float]:
+ def get_xcenter_array(self) -> np.NDArray[float]:
"""
a numpy one-dimensional float array that has the cell center x
coordinate for every column in the grid in model space - not offset or rotated.
@@ -1019,7 +1075,7 @@ def get_xcenter_array(self)->np.NDArray[float]:
x = np.add.accumulate(self.delr) - 0.5 * self.delr
return x
- def get_ycenter_array(self)->np.NDArray[float]:
+ def get_ycenter_array(self) -> np.NDArray[float]:
"""
a numpy one-dimensional float array that has the cell center y
coordinate for every row in the grid in model space - not offset or rotated.
@@ -1071,7 +1127,7 @@ def transform(self, x, y, inverse=False):
y -= self.yll
return x, y
- def get_extent(self)->tuple[float]:
+    def get_extent(self) -> tuple[float, float, float, float]:
"""
Get the extent of the rotated and offset grid
@@ -1100,7 +1156,7 @@ def get_extent(self)->tuple[float]:
return (xmin, xmax, ymin, ymax)
- def get_vertices(self, i, j)->list[list[float]]:
+ def get_vertices(self, i, j) -> list[list[float]]:
"""Get vertices for a single cell or sequence if i, j locations."""
pts = []
xgrid, ygrid = self.xgrid, self.ygrid
@@ -1115,7 +1171,7 @@ def get_vertices(self, i, j)->list[list[float]]:
vrts = np.array(pts).transpose([2, 0, 1])
return [v.tolist() for v in vrts]
- def get_ij(self, x, y)->tuple(int):
+    def get_ij(self, x, y) -> tuple[int, int]:
"""Return the row and column of a point or sequence of points
in real-world coordinates.
@@ -1131,7 +1187,6 @@ def get_ij(self, x, y)->tuple(int):
return r, c
def write_gridspec(self, filename):
-
"""write a PEST-style grid specification file
Parameters
----------
@@ -1157,4 +1212,3 @@ def write_gridspec(self, filename):
f.write("{0:15.6E} ".format(c))
f.write("\n")
return
-
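A minimal usage sketch of helpers.mod2obs_mf6 as reformatted above, following the call in tests/test_helpers.py; the file paths are placeholders, and model_type=31 is used for MODFLOW-6 as in that test:

import pandas as pd
import pypestutils.helpers as helpers

start_datetime = pd.to_datetime("1-1-2000")
results = helpers.mod2obs_mf6(
    "model.dis.grb",  # gridinfo_fname: MF6 binary grid file
    "model.hds",  # depvar_fname: binary dependent-variable file
    "obs.csv",  # obscsv_fname: needs site, x, y, layer, datetime columns
    31,  # model_type, as used in tests/test_helpers.py
    start_datetime,
    1,  # depvar_ftype
    "head",  # depvar_name
)
obsdf = results["interpolated_results"]  # simulated values at each observation
alldf = results["all_results"]  # full interpolated time series per site
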
diff --git a/pypestutils/logger.py b/pypestutils/logger.py
index 2c19ac5..d88393d 100644
--- a/pypestutils/logger.py
+++ b/pypestutils/logger.py
@@ -1,4 +1,5 @@
"""Logger module."""
+
from __future__ import annotations
__all__ = ["get_logger"]
diff --git a/pypestutils/pestutilslib.py b/pypestutils/pestutilslib.py
index e682a09..10403fa 100644
--- a/pypestutils/pestutilslib.py
+++ b/pypestutils/pestutilslib.py
@@ -1,4 +1,5 @@
"""Mid-level pestutilslib module to implement ctypes functions."""
+
from __future__ import annotations
import logging
diff --git a/tests/common.py b/tests/common.py
index 888529a..3be43e6 100644
--- a/tests/common.py
+++ b/tests/common.py
@@ -1,4 +1,5 @@
"""Common functions for tests. For pytest fixtures, see conftest.py."""
+
from pathlib import Path
# key is name, value is number of arguments
diff --git a/tests/test_crash.py b/tests/test_crash.py
index a0e5ba6..1907b2d 100644
--- a/tests/test_crash.py
+++ b/tests/test_crash.py
@@ -1,4 +1,5 @@
"""Test previous crash scenarios."""
+
import numpy as np
import pytest
diff --git a/tests/test_ctypes_declarations.py b/tests/test_ctypes_declarations.py
index 93a494b..f12fcd7 100644
--- a/tests/test_ctypes_declarations.py
+++ b/tests/test_ctypes_declarations.py
@@ -1,4 +1,5 @@
"""Tests for ctypes_declarations module."""
+
from ctypes import c_int
import pytest
diff --git a/tests/test_data.py b/tests/test_data.py
index 989d4f5..f13fe94 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -1,4 +1,5 @@
"""Tests for data module."""
+
import numpy as np
import pytest
diff --git a/tests/test_drivers.py b/tests/test_drivers.py
index 68123dd..328c032 100644
--- a/tests/test_drivers.py
+++ b/tests/test_drivers.py
@@ -1,4 +1,5 @@
"""John Doherty's driver test programs."""
+
from pathlib import Path
import numpy as np
diff --git a/tests/test_enum.py b/tests/test_enum.py
index ed66e31..24b4254 100644
--- a/tests/test_enum.py
+++ b/tests/test_enum.py
@@ -1,4 +1,5 @@
"""Tests for enum module."""
+
import pytest
from pypestutils.enum import KrigType
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
index ce44c7b..9d5b520 100644
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -5,39 +5,48 @@
import pandas as pd
-
def test_mf6_mod2obs():
import pypestutils.helpers as helpers
+
start_datetime = pd.to_datetime("1-1-2000")
dts = []
- for dt in np.linspace(100,300,10):
- dts.append(start_datetime+pd.to_timedelta(dt,unit="d"))
- obs_df = pd.DataFrame({"site":["site1" for _ in range(len(dts))],
- "x":[101 for _ in range(len(dts))],
- "y":[221 for _ in range(len(dts))],
- "layer":[1 for _ in range(len(dts))]})
- obs_df.loc[:,"datetime"] = dts
-
- ws = os.path.join("tests","data","p09")
+ for dt in np.linspace(100, 300, 10):
+ dts.append(start_datetime + pd.to_timedelta(dt, unit="d"))
+ obs_df = pd.DataFrame(
+ {
+ "site": ["site1" for _ in range(len(dts))],
+ "x": [101 for _ in range(len(dts))],
+ "y": [221 for _ in range(len(dts))],
+ "layer": [1 for _ in range(len(dts))],
+ }
+ )
+ obs_df.loc[:, "datetime"] = dts
+
+ ws = os.path.join("tests", "data", "p09")
case = "gwf-p09-mf6"
- obs_csv = os.path.join(ws,"obs.csv")
+ obs_csv = os.path.join(ws, "obs.csv")
obs_df.to_csv(obs_csv)
- dv_name = os.path.join(ws,case+".hds")
- grb_name = os.path.join(ws,case+".dis.grb")
+ dv_name = os.path.join(ws, case + ".hds")
+ grb_name = os.path.join(ws, case + ".dis.grb")
- results = helpers.mod2obs_mf6(grb_name,dv_name,obs_csv,31,start_datetime,1,"head")
+ results = helpers.mod2obs_mf6(
+ grb_name, dv_name, obs_csv, 31, start_datetime, 1, "head"
+ )
iresults = results["interpolated_results"]
assert iresults.shape[0] == obs_df.shape[0]
- dv_name = os.path.join(ws,"gwt-p09-mf6.ucn")
- results = helpers.mod2obs_mf6(grb_name,dv_name,obs_csv,31,start_datetime,1,"concentration")
+ dv_name = os.path.join(ws, "gwt-p09-mf6.ucn")
+ results = helpers.mod2obs_mf6(
+ grb_name, dv_name, obs_csv, 31, start_datetime, 1, "concentration"
+ )
iresults = results["interpolated_results"]
assert iresults.shape[0] == obs_df.shape[0]
if __name__ == "__main__":
import sys
- sys.path.insert(0,os.path.join(".."))
+
+ sys.path.insert(0, os.path.join(".."))
test_mf6_mod2obs()
diff --git a/tests/test_notebooks.py b/tests/test_notebooks.py
index 3b04351..3f37d21 100644
--- a/tests/test_notebooks.py
+++ b/tests/test_notebooks.py
@@ -1,4 +1,5 @@
"""Tests for example notebooks."""
+
import logging
import pytest
import os
@@ -27,17 +28,27 @@
# #os.chdir(cwd)
# return
+
def test_notebooks():
pyemu = pytest.importorskip("pyemu")
nb_dir = os.path.join("examples")
nb_files = [f for f in os.listdir(nb_dir) if f.endswith(".ipynb")]
for nb_file in nb_files:
- pyemu.os_utils.run("jupyter nbconvert --execute --ExecutePreprocessor.timeout=180000 --inplace {0}".format(nb_file),cwd=nb_dir)
- pyemu.os_utils.run("jupyter nbconvert --ClearOutputPreprocessor.enabled=True --ClearMetadataPreprocessor.enabled=True --allow-errors --inplace {0}".format(nb_file),cwd=nb_dir)
+ pyemu.os_utils.run(
+ "jupyter nbconvert --execute --ExecutePreprocessor.timeout=180000 --inplace {0}".format(
+ nb_file
+ ),
+ cwd=nb_dir,
+ )
+ pyemu.os_utils.run(
+ "jupyter nbconvert --ClearOutputPreprocessor.enabled=True --ClearMetadataPreprocessor.enabled=True --allow-errors --inplace {0}".format(
+ nb_file
+ ),
+ cwd=nb_dir,
+ )
return
-
-#if __name__ == "__main__":
- #test_notebook("pypestutils_pstfrom_structured_mf6.ipynb")
+# if __name__ == "__main__":
+# test_notebook("pypestutils_pstfrom_structured_mf6.ipynb")
diff --git a/tests/test_pestutilslib.py b/tests/test_pestutilslib.py
index b0825ee..0f49958 100644
--- a/tests/test_pestutilslib.py
+++ b/tests/test_pestutilslib.py
@@ -1,4 +1,5 @@
"""Tests for pestutilslib module."""
+
import logging
from pathlib import PureWindowsPath
@@ -34,109 +35,82 @@ def test_create_char_array():
lib.create_char_array(1, "LENGRIDNAME")
-def test_inquire_modflow_binary_file_specs():
- ...
+def test_inquire_modflow_binary_file_specs(): ...
-def test_retrieve_error_message():
- ...
+def test_retrieve_error_message(): ...
-def test_install_structured_grid():
- ...
+def test_install_structured_grid(): ...
-def test_get_cell_centres_structured():
- ...
+def test_get_cell_centres_structured(): ...
-def test_uninstall_structured_grid():
- ...
+def test_uninstall_structured_grid(): ...
-def test_free_all_memory():
- ...
+def test_free_all_memory(): ...
-def test_interp_from_structured_grid():
- ...
+def test_interp_from_structured_grid(): ...
-def test_interp_to_obstime():
- ...
+def test_interp_to_obstime(): ...
-def test_install_mf6_grid_from_file():
- ...
+def test_install_mf6_grid_from_file(): ...
-def test_get_cell_centres_mf6():
- ...
+def test_get_cell_centres_mf6(): ...
-def test_uninstall_mf6_grid():
- ...
+def test_uninstall_mf6_grid(): ...
-def test_calc_mf6_interp_factors():
- ...
+def test_calc_mf6_interp_factors(): ...
-def test_interp_from_mf6_depvar_file():
- ...
+def test_interp_from_mf6_depvar_file(): ...
-def test_extract_flows_from_cbc_file():
- ...
+def test_extract_flows_from_cbc_file(): ...
-def test_calc_kriging_factors_2d():
- ...
+def test_calc_kriging_factors_2d(): ...
-def test_calc_kriging_factors_auto_2d():
- ...
+def test_calc_kriging_factors_auto_2d(): ...
-def test_calc_kriging_factors_3d():
- ...
+def test_calc_kriging_factors_3d(): ...
-def test_krige_using_file():
- ...
+def test_krige_using_file(): ...
-def test_build_covar_matrix_2d():
- ...
+def test_build_covar_matrix_2d(): ...
-def test_build_covar_matrix_3d():
- ...
+def test_build_covar_matrix_3d(): ...
-def test_calc_structural_overlay_factors():
- ...
+def test_calc_structural_overlay_factors(): ...
-def test_interpolate_blend_using_file():
- ...
+def test_interpolate_blend_using_file(): ...
-def test_ipd_interpolate_2d():
- ...
+def test_ipd_interpolate_2d(): ...
-def test_ipd_interpolate_3d():
- ...
+def test_ipd_interpolate_3d(): ...
-def test_initialize_randgen():
- ...
+def test_initialize_randgen(): ...
-def test_fieldgen2d_sva():
- ...
+def test_fieldgen2d_sva(): ...
-def test_fieldgen3d_sva():
- ...
+def test_fieldgen3d_sva(): ...