diff --git a/pyproject.toml b/pyproject.toml index 119dbd7..3948c48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ dependencies = [ "natsort>=8.3.0", "pymongo>=4.4", "pysmartdl2>=2.0.0", + "plotly>=5.15.0", "dnspython", "ruamel.yaml", "colorama" @@ -48,7 +49,8 @@ dev = [ "coremltools>=7.0", "onnxconverter_common>=1.14.0", "onnxsim==0.4.33", - "onnxruntime>=1.16.0" + "onnxruntime>=1.16.0", + "optimade[http_client]>=1.0.0" ] [project.urls] diff --git a/pysipfenn/__init__.py b/pysipfenn/__init__.py index c87ba6a..56ede84 100644 --- a/pysipfenn/__init__.py +++ b/pysipfenn/__init__.py @@ -1,3 +1,4 @@ -print('Importing from top pySIPFENN namespace...') +# Importing from top pySIPFENN namespace... from pysipfenn.core.pysipfenn import * from pysipfenn.core.modelExporters import * +from pysipfenn.core.modelAdjusters import * diff --git a/pysipfenn/core/modelAdjusters.py b/pysipfenn/core/modelAdjusters.py new file mode 100644 index 0000000..2eef925 --- /dev/null +++ b/pysipfenn/core/modelAdjusters.py @@ -0,0 +1,858 @@ +# Standard library imports +import os +from typing import Union, Literal, Tuple, List, Dict +from copy import deepcopy +import gc +from functools import reduce +import operator +from random import shuffle +import time + +# Default 3rd party imports +import numpy as np +import torch +from torch.utils.data import DataLoader, TensorDataset +from pysipfenn.core.pysipfenn import Calculator +from pymatgen.core import Structure, Composition + +import plotly.express as px +import plotly.graph_objects as go + + +class LocalAdjuster: + """ + Adjuster class taking a ``Calculator`` and operating on local data provided to model as a pair of descriptor data + (provided in several ways) and target values (provided in several ways). It can then adjust the model with some predefined + hyperparameters or run a fairly typical grid search, which can be interpreted manually or uploaded to the ClearML + platform. Can use CPU, CUDA, or MPS (Mac M1) devices for training. + + Args: + calculator: Instance of the ``Calculator`` class with the model to be adjusted, defined and loaded. It can + contain the descriptor data already in it, so that it does not have to be provided separately. + model: Name of the model to be adjusted in the ``Calculator``. E.g., ``SIPFENN_Krajewski2022_NN30``. + targetData: Target data to be used for training the model. It can be provided as a path to a NumPy ``.npy``/ + ``.NPY`` or CSV ``.csv``/``.CSV`` file, or directly as a NumPy array. It has to be the same length as the + descriptor data. + descriptorData: Descriptor data to be used for training the model. It can be left unspecified (``None``) to + use the data in the ``Calculator``, or provided as a path to a NumPy ``.npy``/``.NPY`` or CSV ``.csv``/ + ``.CSV`` file, or directly as a NumPy array. It has to be the same length as the target data. Default is + ``None``. + device: Device to be used for training the model. It Has to be one of the following: ``"cpu"``, ``"cuda"``, or + ``"mps"``. Default is ``"cpu"``. + descriptor: Name of the feature vector provided in the descriptorData. It can be optionally provided to + check if the descriptor data is compatible. + useClearML: Whether to use the ClearML platform for logging the training process. Default is ``False``. + taskName: Name of the task to be used. Default is ``"LocalFineTuning"``. + + Attributes: + calculator: Instance of the ``Calculator`` class being operated on. + model: The original model to be adjusted. 
+ adjustedModel: A PyTorch model after the adjustment. Initially set to ``None``. + descriptorData: NumPy array with descriptor data to use as input for the model. + targetData: NumPy array with target data to use as output for the model. + """ + + def __init__( + self, + calculator: Calculator, + model: str, + targetData: Union[str, np.ndarray], + descriptorData: Union[None, str, np.ndarray] = None, + device: Literal["cpu", "cuda", "mps"] = "cpu", + descriptor: Literal["Ward2017", "KS2022"] = None, + useClearML: bool = False, + taskName: str = "LocalFineTuning" + ) -> None: + self.adjustedModel = None + self.useClearML = useClearML + self.useClearMLMessageDisplayed = False + self.taskName = taskName + + assert isinstance(calculator, Calculator), "The calculator must be an instance of the Calculator class." + self.calculator = calculator + + self.device = torch.device(device) + + assert isinstance(model, str), "The model must be a string pointing to the model to be adjusted in the Calculator." + assert model in self.calculator.models, "The model must be one of the models in the Calculator." + assert model in self.calculator.loadedModels, "The model must be loaded in the Calculator." + self.modelName = model + self.model = self.calculator.loadedModels[model] + self.model = self.model.to(device=self.device) + + if descriptorData is None: + assert self.calculator.descriptorData is not None, "The descriptor data can be inferred from the data in the Calculator, but no data is present." + self.descriptorData = self.calculator.descriptorData + elif isinstance(descriptorData, np.ndarray): + self.descriptorData = descriptorData + elif isinstance(descriptorData, str): + # Path to NPY file with data + if (descriptorData.endswith(".npy") or descriptorData.endswith(".NPY")) and os.path.exists(descriptorData): + self.descriptorData = np.load(descriptorData) + # Path to CSV file with data + elif (descriptorData.endswith(".csv") or descriptorData.endswith(".CSV")) and os.path.exists(descriptorData): + self.descriptorData = np.loadtxt(descriptorData, delimiter=",", skiprows=1)[:, 1:] + else: + raise ValueError("If a string is provided as descriptor data parameter, it must be a path to a npy/NPY or csv/CSV file.") + else: + print(descriptorData) + raise ValueError("The descriptor data must be either (a) None to use the data in the Calculator," + "(b) a path to a npy/NPY file, or (c) a path to a csv/CSV file.") + + if isinstance(targetData, np.ndarray): + self.targetData = targetData + elif isinstance(targetData, str): + # Path to NPY file with data + if (targetData.endswith(".npy") or targetData.endswith(".NPY")) and os.path.exists(targetData): + self.targetData = np.load(targetData) + # Path to CSV file with data + elif (targetData.endswith(".csv") or targetData.endswith(".CSV")) and os.path.exists(targetData): + # Skip the first row if it is a header + self.targetData = np.loadtxt(targetData, delimiter=",", skiprows=1)[:, 1:] + else: + raise ValueError("If a string is provided as target data parameter, it must be a path to a npy/NPY or csv/CSV file.") + else: + raise ValueError("The target data must be either a path to a npy/NPY file or a path to a csv/CSV file.") + + assert len(self.descriptorData) == len(self.targetData), "The descriptor and target data must have the same length." + + if descriptor is not None: + if descriptor == "Ward2017": + assert self.descriptorData.shape[1] == 271, "The descriptor must have 271 features for the Ward2017 descriptor." 
+ elif descriptor == "KS2022": + assert self.descriptorData.shape[1] == 256, "The descriptor must have 256 features for the KS2022 descriptor." + else: + raise NotImplementedError("The descriptor must be either 'Ward2017' or 'KS2022'. Others will be added in the future.") + + self.comps: List[str] = [] + self.names: List[str] = [] + self.validationLabels: List[str] = [] + + print("Initialized Adjuster instance!\n") + + def plotStarting(self) -> None: + """ + Plot the starting model (before adjustment) on the target data. By default, it will plot in your browser. + """ + reference = self.targetData.flatten() + assert len(reference) == len(self.descriptorData), "The target data and descriptor data must have the same length." + assert len(reference) != 0, "The target data must not be empty for plotting." + self.model.eval() + print("Running the STARTING model on the data and plotting the results...") + with torch.no_grad(): + dataIn = torch.from_numpy(np.array(self.descriptorData)).float().to(device=self.device) + predictions = self.model(dataIn, None).detach().cpu().numpy().flatten() + minVal = min(min(reference), min(predictions)) + maxVal = max(max(reference), max(predictions)) + + if self.names: + fig = px.scatter( + x=reference, + y=predictions, + hover_name=self.names, + labels={"x": "Target Data", "y": "Predictions"}, + title="Starting (Unadjusted) Model Predictions (Hover for Name)" + ) + else: + fig = px.scatter( + x=reference, + y=predictions, + labels = {"x": "Target Data", "y": "Predictions"}, + title = "Starting (Unadjusted) Model Predictions" + ) + # If the validation labels are set, color the points as blue for training, green for validation, and red for + # any other label, just in case advanced users want to use this method for other purposes. + if self.validationLabels: + print("Overlaying the training and validation labels on the plot.") + fig.update_traces( + marker=dict( + color=[( + "blue" if label == "Training" else + "green" if label == "Validation" else + "red") for label in self.validationLabels], + symbol='circle-dot', + opacity=0.5, + size=12 + ) + ) + else: + fig.update_traces( + marker=dict( + symbol='circle-dot', + opacity=0.5, + size=12 + ) + ) + fig.add_trace( + go.Scatter( + x=[minVal, maxVal], + y=[minVal, maxVal], + mode='lines', + line=dict(color='gray'), + name='x=y' + ) + ) + fig.show() + + def plotAdjusted(self) -> None: + """ + Plot the adjusted model on the target data. By default, it will plot in your browser. + """ + assert self.adjustedModel is not None, "The model must be adjusted before plotting. It is currently None." + self.adjustedModel.eval() + reference = self.targetData.flatten() + assert len(reference) == len(self.descriptorData), "The target data and descriptor data must have the same length." + assert len(reference) != 0, "The target data must not be empty for plotting." 
+ print("Running the ADJUSTED model on the data and plotting the results...") + with torch.no_grad(): + dataIn = torch.from_numpy(np.array(self.descriptorData)).float().to(device=self.device) + predictions = self.adjustedModel(dataIn, None).detach().cpu().numpy().flatten() + minVal = min(min(reference), min(predictions)) + maxVal = max(max(reference), max(predictions)) + + if self.names: + fig = px.scatter( + x=reference, + y=predictions, + hover_name=self.names, + labels={"x": "Target Data", "y": "Predictions"}, + title="Adjusted Model Predictions (Hover for Name)" + ) + else: + fig = px.scatter( + x=reference, + y=predictions, + labels = {"x": "Target Data", "y": "Predictions"}, + title = "Adjusted Model Predictions" + ) + # If the validation labels are set, color the points as blue for training, green for validation, and red for + # any other label, just in case advanced users want to use this method for other purposes. + if self.validationLabels: + print("Overlaying the training and validation labels on the plot.") + fig.update_traces( + marker=dict( + color=[( + "blue" if label == "Training" else + "green" if label == "Validation" else + "red") for label in self.validationLabels], + symbol='circle-dot', + opacity=0.5, + size=12 + ) + ) + else: + fig.update_traces( + marker=dict( + symbol='circle-dot', + opacity=0.5, + size=12 + ) + ) + fig.add_trace( + go.Scatter( + x=[minVal, maxVal], + y=[minVal, maxVal], + mode='lines', + line=dict(color='gray'), + name='x=y' + ) + ) + fig.show() + + def adjust( + self, + validation: float = 0.2, + learningRate: float = 1e-5, + epochs: int = 50, + batchSize: int = 32, + optimizer: Literal["Adam", "AdamW", "Adamax", "RMSprop"] = "Adam", + weightDecay: float = 1e-5, + lossFunction: Literal["MSE", "MAE"] = "MAE", + verbose: bool = True + ) -> Tuple[torch.nn.Module, List[float], List[float]]: + """ + Takes the original model, copies it, and adjusts the model on the provided data. The adjusted model is stored in + the ``adjustedModel`` attribute of the class and can be then persisted to the original ``Calculator`` or used + for plotting. The default hyperparameters are selected for fine-tuning the model rather than retraining it, as + to slowly adjust it (1% of the typical learning rate) and not overfit it (50 epochs). + + Args: + learningRate: The learning rate to be used for the adjustment. Default is ``1e-5`` that is 1% of a typical + learning rate of ``Adam`` optimizer. + epochs: The number of times to iterate over the data, i.e., how many times the model will see the data. + Default is ``50``, which is on the higher side for fine-tuning. If the model does not retrain fast enough + but already converged, consider lowering this number to reduce the time and possibly overfitting to the + training data. + batchSize: The number of points passed to the model at once. Default is ``32``, which is a typical batch size for + smaller datasets. If the dataset is large, consider increasing this number to speed up the training. + optimizer: Algorithm to be used for optimization. Default is ``Adam``, which is a good choice for most models + and one of the most popular optimizers. Other options are + lossFunction: Loss function to be used for optimization. Default is ``MAE`` (Mean Absolute Error / L1) that is + more robust to outliers than ``MSE`` (Mean Squared Error). + validation: Fraction of the data to be used for validation. Default is the common ``0.2`` (20% of the data). 
+ If set to ``0``, the model will be trained on the whole dataset without validation, and you will not be able + to check for overfitting or gauge the model's performance on unseen data. + weightDecay: Weight decay to be used for optimization. Default is ``1e-5`` that should work well if data is + abundant enough relative to the model complexity. If the model is overfitting, consider increasing this + number to regularize the model more. + verbose: Whether to print information, such as loss, during the training. Default is ``True``. + + Returns: + A tuple with 3 elements: (1) the adjusted model, (2) training loss list of floats, and (3) validation loss + list of floats. The adjusted model is also stored in the ``adjustedModel`` attribute of the class. + """ + + if verbose: + print("Loading the data...") + assert len(self.descriptorData) != 0, "The descriptor data must not be empty for the adjustment process." + assert len(self.targetData) != 0, "The target data must not be empty for the adjustment process." + assert len(self.descriptorData) == len(self.targetData), "The descriptor and target data must have the same length." + + ddTensor = torch.from_numpy(self.descriptorData).float().to(device=self.device) + tdTensor = torch.from_numpy(self.targetData).float().to(device=self.device) + if validation > 0: + split = int(len(ddTensor) * (1 - validation)) + self.validationLabels = ["Training"]*split + ["Validation"]*(len(ddTensor)-split) + ddTrain, ddVal = ddTensor[:split], ddTensor[split:] + tdTrain, tdVal = tdTensor[:split], tdTensor[split:] + else: + self.validationLabels = ["Training"]*len(ddTensor) + ddTrain, ddVal = ddTensor, None + tdTrain, tdVal = tdTensor, None + + datasetTrain = TensorDataset(ddTrain, tdTrain) + dataloaderTrain = DataLoader(datasetTrain, batch_size=batchSize, shuffle=True) + + if verbose: + print(f'LR: {learningRate} | Optimizer: {optimizer} | Weight Decay: {weightDecay} | Loss: {lossFunction}') + # Training a logging platform. Completely optional and does not affect the training. + if self.useClearML: + if verbose and not self.useClearMLMessageDisplayed: + print("Using ClearML for logging. Make sure to have (1) their Python package installed and (2) the API key" + " set up according to their documentation. 
Otherwise you will get an error.") + self.useClearMLMessageDisplayed = True + from clearml import Task + task = Task.create(project_name=self.taskName, + task_name=f'LR:{learningRate} OPT:{optimizer} WD:{weightDecay} LS:{lossFunction}') + task.set_parameters({'lr': learningRate, + 'epochs': epochs, + 'batch_size': batchSize, + 'weight_decay': weightDecay, + 'loss': lossFunction, + 'optimizer': optimizer, + 'model': self.modelName}) + if verbose: + print("Copying and initializing the model...") + model = deepcopy(self.model) + model.train() + if verbose: + print("Setting up the training...") + if optimizer == "Adam": + optimizerClass = torch.optim.Adam + elif optimizer == "AdamW": + optimizerClass = torch.optim.AdamW + elif optimizer == "Adamax": + optimizerClass = torch.optim.Adamax + elif optimizer == "RMSprop": + optimizerClass = torch.optim.RMSprop + else: + raise NotImplementedError("The optimizer must be one of the following: 'Adam', 'AdamW', 'Adamax', 'RMSprop'.") + optimizerInstance = optimizerClass(model.parameters(), lr=learningRate, weight_decay=weightDecay) + + if lossFunction == "MSE": + loss = torch.nn.MSELoss() + elif lossFunction == "MAE": + loss = torch.nn.L1Loss() + else: + raise NotImplementedError("The loss function must be one of the following: 'MSE', 'MAE'.") + + transferLosses = [float(loss(model(ddTrain, None), tdTrain))] + if validation > 0: + validationLosses = [float(loss(model(ddVal, None), tdVal))] + if verbose: + print( + f'Train: {transferLosses[-1]:.4f} | Validation: {validationLosses[-1]:.4f} | Epoch: 0/{epochs}') + else: + validationLosses = [] + if verbose: + print(f'Train: {transferLosses[-1]:.4f} | Epoch: 0/{epochs}') + + for epoch in range(epochs): + model.train() + for data, target in dataloaderTrain: + optimizerInstance.zero_grad() + output = model(data, None) + lossValue = loss(output, target) + lossValue.backward() + optimizerInstance.step() + transferLosses.append(float(loss(model(ddTrain, None), tdTrain))) + + if validation > 0: + model.eval() + validationLosses.append(float(loss(model(ddVal, None), tdVal))) + model.train() + if self.useClearML: + task.get_logger().report_scalar( + title='Loss', + series='Validation', + value=validationLosses[-1], + iteration=epoch+1) + if verbose: + print( + f'Train: {transferLosses[-1]:.4f} | Validation: {validationLosses[-1]:.4f} | Epoch: {epoch + 1}/{epochs}') + else: + if verbose: + print(f'Train: {transferLosses[-1]:.4f} | Epoch: {epoch + 1}/{epochs}') + + if self.useClearML: + task.get_logger().report_scalar( + title='Loss', + series='Training', + value=transferLosses[-1], + iteration=epoch+1) + + print("Training finished!") + if self.useClearML: + task.close() + model.eval() + self.adjustedModel = model + del model + del optimizerInstance + del loss + gc.collect() + print("All done!") + + return self.adjustedModel, transferLosses, validationLosses + + def matrixHyperParameterSearch( + self, + validation: float = 0.2, + epochs: int = 20, + batchSize: int = 64, + lossFunction: Literal["MSE", "MAE"] = "MAE", + learningRates: List[float] = (1e-6, 1e-5, 1e-4), + optimizers: List[Literal["Adam", "AdamW", "Adamax", "RMSprop"]] = ("Adam", "AdamW", "Adamax"), + weightDecays: List[float] = (1e-5, 1e-4, 1e-3), + verbose: bool = True, + plot: bool = True + ) -> Tuple[torch.nn.Module, Dict[str, Union[float, str]]]: + """ + Performs a grid search over the hyperparameters provided to find the best combination. 
By default, it will + plot the training history with plotly in your browser, and (b) print the best hyperparameters found. If the + ClearML platform was set to be used for logging (at the class initialization), the results will be uploaded + there as well. If the default values are used, it will test 27 combinations of learning rates, optimizers, and + weight decays. The method will then adjust the model to the best hyperparameters found, corresponding to the + lowest validation loss if validation is used, or the lowest training loss if validation is not used + (``validation=0``). Note that the validation is used by default. + + Args: + validation: Same as in the ``adjust`` method. Default is ``0.2``. + epochs: Same as in the ``adjust`` method. Default is ``20`` to keep the search time reasonable on most + CPU-only machines (around 1 hour). For most cases, a good starting number of epochs is 100-200, which + should complete in 10-30 minutes on most modern GPUs or Mac M1-series machines (w. device set to MPS). + batchSize: Same as in the ``adjust`` method. Default is ``32``. + lossFunction: Same as in the ``adjust`` method. Default is ``MAE``, i.e. Mean Absolute Error or L1 loss. + learningRates: List of floats with the learning rates to be tested. Default is ``(1e-6, 1e-5, 1e-4)``. See + the ``adjust`` method for more information. + optimizers: List of strings with the optimizers to be tested. Default is ``("Adam", "AdamW", "Adamax")``. See + the ``adjust`` method for more information. + weightDecays: List of floats with the weight decays to be tested. Default is ``(1e-5, 1e-4, 1e-3)``. See + the ``adjust`` method for more information. + verbose: Same as in the ``adjust`` method. Default is ``True``. + plot: Whether to plot the training history after all the combinations are tested. Default is ``True``. 
+ """ + nTasks = len(learningRates) * len(optimizers) * len(weightDecays) + if verbose: + print("Starting the hyperparameter search...") + print(f"{nTasks} combinations will be tested.\n") + + bestModel: torch.nn.Module = None + bestTrainingLoss: float = np.inf + bestValidationLoss: float = np.inf + bestHyperparameters: Dict[str, Union[float, str, None]] = { + "learningRate": None, + "optimizer": None, + "weightDecay": None, + "epochs": None + } + + trainLossHistory: List[List[float]] = [] + validationLossHistory: List[List[float]] = [] + labels: List[str] = [] + tasksDone = 0 + t0 = time.perf_counter() + + for learningRate in learningRates: + for optimizer in optimizers: + for weightDecay in weightDecays: + labels.append(f"LR: {learningRate} | OPT: {optimizer} | WD: {weightDecay}") + model, trainingLoss, validationLoss = self.adjust( + validation=validation, + learningRate=learningRate, + epochs=epochs, + batchSize=batchSize, + optimizer=optimizer, + weightDecay=weightDecay, + lossFunction=lossFunction, + verbose=True + ) + trainLossHistory.append(trainingLoss) + validationLossHistory.append(validationLoss) + if validation > 0: + localBestValidationLoss, bestEpoch = min((val, idx) for idx, val in enumerate(validationLoss)) + if localBestValidationLoss < bestValidationLoss: + print(f"New best model found with LR: {learningRate}, OPT: {optimizer}, WD: {weightDecay}, " + f"Epoch: {bestEpoch + 1}/{epochs} | Train: {trainingLoss[bestEpoch]:.4f} | " + f"Validation: {localBestValidationLoss:.4f}") + del bestModel + gc.collect() + bestModel = model + bestTrainingLoss = trainingLoss[bestEpoch] + bestValidationLoss = localBestValidationLoss + bestHyperparameters["learningRate"] = learningRate + bestHyperparameters["optimizer"] = optimizer + bestHyperparameters["weightDecay"] = weightDecay + bestHyperparameters["epochs"] = bestEpoch + 1 + else: + print(f"Model with LR: {learningRate}, OPT: {optimizer}, WD: {weightDecay} did not improve.") + else: + localBestTrainingLoss, bestEpoch = min((val, idx) for idx, val in enumerate(trainingLoss)) + if localBestTrainingLoss < bestTrainingLoss: + print(f"New best model found with LR: {learningRate}, OPT: {optimizer}, WD: {weightDecay}, " + f"Epoch: {bestEpoch + 1}/{epochs} | Train: {localBestTrainingLoss:.4f}") + del bestModel + gc.collect() + bestModel = model + bestTrainingLoss = localBestTrainingLoss + bestHyperparameters["learningRate"] = learningRate + bestHyperparameters["optimizer"] = optimizer + bestHyperparameters["weightDecay"] = weightDecay + bestHyperparameters["epochs"] = bestEpoch + 1 + else: + print(f"Model with LR: {learningRate}, OPT: {optimizer}, WD: {weightDecay} did not improve.") + + tasksDone += 1 + pastTimePerTask = ((time.perf_counter() - t0)/60) / tasksDone + print(f"Task {tasksDone}/{nTasks} done. Estimated time left: {pastTimePerTask * (nTasks - tasksDone):.2f} minutes.\n") + + + if verbose: + print(f"\n\nBest model found with LR: {bestHyperparameters['learningRate']}, OPT: {bestHyperparameters['optimizer']}, " + f"WD: {bestHyperparameters['weightDecay']}, Epoch: {bestHyperparameters['epochs']}") + if validation > 0: + print(f"Train: {bestTrainingLoss:.4f} | Validation: {bestValidationLoss:.4f}\n") + else: + print(f"Train: {bestTrainingLoss:.4f}\n") + assert bestModel is not None, "The best model was not found. Something went wrong during the hyperparameter search." 
+ self.adjustedModel = bestModel + del bestModel + gc.collect() + + if plot: + fig1 = go.Figure() + for idx, label in enumerate(labels): + fig1.add_trace( + go.Scatter( + x=np.arange(epochs+1), + y=trainLossHistory[idx], + mode='lines+markers', + name=label) + + ) + fig1.update_layout( + title="Training Loss History", + xaxis_title="Epoch", + yaxis_title="Loss", + legend_title="Hyperparameters", + showlegend=True, + template="plotly_white" + ) + fig1.show() + if validation > 0: + fig2 = go.Figure() + for idx, label in enumerate(labels): + fig2.add_trace( + go.Scatter( + x=np.arange(epochs+1), + y=validationLossHistory[idx], + mode='lines+markers', + name=label) + ) + fig2.update_layout( + title="Validation Loss History", + xaxis_title="Epoch", + yaxis_title="Loss", + legend_title="Hyperparameters", + showlegend=True, + template="plotly_white" + ) + fig2.show() + + return self.adjustedModel, bestHyperparameters + + def highlightPoints( + self, + pointsIndices: List[int] + ) -> None: + if not self.validationLabels: + print("No validation labels set yet. Please note highlights will be overwriten by the next adjustemnt call.") + for p in pointsIndices: + assert p < len(self.validationLabels), "The index of the point to be highlighted is out of bounds." + self.validationLabels[p] = "Highlight" + + def highlightCompositions( + self, + compositions: List[str] + ) -> None: + if not self.validationLabels: + print("No validation labels set yet. Please note highlights will be overwriten by the next adjustemnt call.") + assert self.comps, "The compositions must be set before highlighting them. If you use ``OPTIMADEAdjuster``, this is done automatically, but with ``LocalAdjuster``, you have to set them manually." + reducedFormulas = set([Composition(c).reduced_formula for c in compositions]) + for idx, comp in enumerate(self.comps): + if comp in reducedFormulas: + self.validationLabels[idx] = "Highlight" + + +class OPTIMADEAdjuster(LocalAdjuster): + """ + Adjuster class operating on data provided by the OPTIMADE API. Primarily geared towards tuning or retraining of the + models based on other atomistic databases, or their subsets, accessed through OPTIMADE, to adjust the model to a + different domain, which in the context of DFT datasets could mean adjusting the model to predict properties with DFT + settings used by that database or focusing its attention to specific chemistry like, for instance, all compounds of + Sn and all perovskites. It accepts OPTIMADE query as an input and then operates based on the ``LocalAdjuster`` class. + + It will set up the environment for the adjustment, letting you progressively build up the training dataset by + OPTIMADE queries which get featurized and their results will be concatenated, i.e., you can make one big query or + several smaller ones and then adjust the model on the whole dataset when you are ready. + + For details on more advanced uses of the OPTIMADE API client, please refer to [the documentation](https://www.optimade.org/optimade-python-tools/latest/getting_started/client/). + + Args: + calculator: Instance of the ``Calculator`` class with the model to be adjusted, defined and loaded. Unlike in the + ``LocalAdjuster``, the descriptor data will not be passed, since it will be fetched from the OPTIMADE API. + model: Name of the model to be adjusted in the ``Calculator``. E.g., ``SIPFENN_Krajewski2022_NN30``. + provider: Strings with the name of the provider to be used for the OPTIMADE queries. 
The type-hinting + gives a list of providers available at the time of writing this code, but it is by no means limited to them. + For the up-to-date list, along with their current status, please refer to the + [OPTIMADE Providers Dashboard](https://optimade.org/providers-dashboard). The default is ``"mp"`` which + stands for the Materials Project, but we do not recommend any particular provider over any other. One has to + be picked to work out of the box. Your choice should be based on the data you are interested in. + targetPath: List of strings with the path to the target data in the OPTIMADE response. This will be dependent + on the provider you choose, and you will need to identify it by looking at the response. The easiest way to + do this is by going to their endpoint, like + [this, very neat one, for JARVIS](https://jarvis.nist.gov/optimade/jarvisdft/v1/structures/), + [this one for Alexandria PBEsol](https://alexandria.icams.rub.de/pbesol/v1/structures), + [this one for MP](https://optimade.materialsproject.org/v1/structures), + or [this one for our in-house MPDD](https://optimade.mpdd.org/v1/structures). Examples include + ``('attributes', '_mp_stability', 'gga_gga+u', 'formation_energy_per_atom')`` for GGA+U formation energy + per atom in MP, or ``('attributes', '_alexandria_scan_formation_energy_per_atom')`` for the `SCAN` formation + energy per atom in Alexandria, or ``('attributes', '_alexandria_formation_energy_per_atom')`` for the + ``GGAsol`` formation energy per atom in Alexandria, or ``('attributes', '_jarvis_formation_energy_peratom')`` + for the `optb88vdw` formation energy per atom in JARVIS, or ``('attributes', + '_mpdd_formationenergy_sipfenn_krajewski2020_novelmaterialsmodel')`` for the formation energy predicted + by the SIPFENN_Krajewski2020_NovelMaterialsModel for every structure in MPDD. Default is the MP example. + targetSize: The length of the target data to be fetched from the OPTIMADE API. This is typically ``1`` for a single + scalar property, but it can be more. Default is ``1``. + device: Same as in the ``LocalAdjuster``. Default is ``"cpu"`` which is available on all systems. If you have a + GPU, you can set it to ``"cuda"``, or to ``"mps"`` if you are using a Mac M1-series machine, in order to + speed up the training process by orders of magnitude. + descriptor: *Not* the same as in the ``LocalAdjuster``. Since the descriptor data will be calculated for each + structure fetched from the OPTIMADE API, this parameter is needed to specify which descriptor to use. At the + time of writing this code, it can be either ``"Ward2017"`` or ``"KS2022"``. Special versions of ``KS2022`` + cannot be used since assumptions cannot be made about the data fetched from the OPTIMADE API and only general + symmetry-based optimizations can be applied. Default is ``"KS2022"``. + useClearML: Same as in the ``LocalAdjuster``. Default is ``False``. + taskName: Same as in the ``LocalAdjuster``. Default is ``"OPTIMADEFineTuning"``, and you are encouraged to change + it, especially if you are using the ClearML platform. + maxResults: The maximum number of results to be fetched from the OPTIMADE API for a given query. Default is + ``10000`` which is a very high number for most re-training tasks. If you are fetching a lot of data, it's + possible the query is too broad, and you should consider narrowing it down. + endpointOverride: List of URL strings with the endpoint to be used for the OPTIMADE queries. 
This is an advanced + option allowing you to ignore the ``provider`` parameter and directly specify the endpoint to be used. It is + useful if you want to use a specific version of the provider's endpoint or narrow down the query to a + sub-database (Alexandria has two different endpoints for PBEsol and SCAN, for instance). You can also use it + to query unofficial endpoints. Make sure to (a) include protocol (``http://`` or ``https://``) and (b) not + include version (``/v1/``), nor the specific endpoint (``/structures``) as the client will add them. I.e., + you want ``https://alexandria.icams.rub.de/pbesol`` rather than + ``alexandria.icams.rub.de/pbesol/v1/structures``. Default is ``None`` which has no effect. + """ + + def __init__( + self, + calculator: Calculator, + model: str, + provider: + Literal[ + "aiida", + "aflow", + "alexandria", + "cod", + "ccpnc", + "cmr", + "httk", + "matcloud", + "mcloud", + "mcloudarchive", + "mp", + "mpdd", + "mpds", + "mpod", + "nmd", + "odbx", + "omdb", + "oqmd", + "jarvis", + "pcod", + "tcod", + "twodmatpedia" + ] = "mp", + targetPath: List[str] = ('attributes', '_mp_stability', 'gga_gga+u', 'formation_energy_per_atom'), + targetSize: int = 1, + device: Literal["cpu", "cuda", "mps"] = "cpu", + descriptor: Literal["Ward2017", "KS2022"] = "KS2022", + useClearML: bool = False, + taskName: str = "OPTIMADEFineTuning", + maxResults: int = 10000, + endpointOverride: List[str] = None + ) -> None: + from optimade.client import OptimadeClient + + assert isinstance(calculator, Calculator), "The calculator must be an instance of the Calculator class." + assert isinstance(model, str), "The model must be a string with the name of the model to be adjusted." + assert isinstance(provider, str), "The provider must be a string with the name of the provider to be used." + assert len(provider) != 0, "The provider must not be an empty string." + assert targetPath and isinstance(targetPath, list) or isinstance(targetPath, tuple), "The target path must be a list of strings pointing to the target data in the OPTIMADE response." + assert len(targetPath) > 0, "The target path must not be empty, i.e., it cannot point to no data." + if provider != "mp" and targetPath == ('attributes', '_mp_stability', 'gga_gga+u', 'formation_energy_per_atom'): + raise ValueError("You are utilizing the default (example) property target path specific to the Materials " + "Project but you are connecting to a different provider. You must adjust the target path " + "to receive data from the provider you are connecting to based on what they serve through " + "their provider-specific OPTIMADE endpoint fields. See targetPath docstring for more info.") + + super().__init__( + calculator=calculator, + model=model, + targetData=np.array([]), + descriptorData=np.array([]), + device=device, + descriptor=None, + useClearML=useClearML, + taskName=taskName, + ) + + self.descriptor = descriptor + self.targetPath = targetPath + self.provider = provider + if endpointOverride is None: + self.client = OptimadeClient( + use_async=False, + include_providers=[provider], + max_results_per_provider=maxResults + ) + else: + assert isinstance(endpointOverride, list) or isinstance(endpointOverride, tuple), "The endpoint override must be a list of strings." + assert len(endpointOverride) != 0, "The endpoint override must not be an empty list." 
+ self.client = OptimadeClient( + use_async=False, + base_urls=endpointOverride, + max_results_per_provider=maxResults + ) + + if self.descriptor == "Ward2017": + self.descriptorData: np.ndarray = np.empty((0, 271)) + elif self.descriptor == "KS2022": + self.descriptorData: np.ndarray = np.empty((0, 256)) + else: + raise NotImplementedError("The descriptor must be either 'Ward2017' or 'KS2022'. Others will be added in the future.") + + self.targetData: np.ndarray = np.empty((0, targetSize)) + + print("Initialized Adjuster instance!\n") + + def fetchAndFeturize( + self, + query: str, + parallelWorkers: int = 1, + verbose: bool = True + ) -> None: + from optimade.adapters.structures import pymatgen as pymatgen_adapter + from optimade.models import StructureResource + + response = self.client.get(query) + providerResponse = response['structures'][query] + respondingProviderURL = list(providerResponse.keys())[0] + data = providerResponse[respondingProviderURL]['data'] + + targetDataStage: List[List[float]] = [] + structs: List[Structure] = [] + comps: List[str] = [] + names: List[str] = [] + missing: List[str] = [] + + if verbose: + print(f"Obtained {len(data)} structures from the OPTIMADE API.") + print("Extracting the data...") + + for datapoint in data: + # OPTIMADE Standard Data + comp = Composition(datapoint['attributes']['chemical_formula_reduced']).reduced_formula + name = comp + '-' + datapoint['id'] + + # Database-specific payload existing at a specific target path (e.g., formation energy per atom in MP) + try: + targetDataStage.append([reduce(operator.getitem, self.targetPath, datapoint)]) + except KeyError: + missing.append(name) + continue + + comps.append(comp) + names.append(name) + # Stage for featurization of the received data + structs.append(pymatgen_adapter.get_pymatgen(StructureResource(**datapoint))) + + if missing: + print(f"\nCould not find the target data at the provided path: {self.targetPath}\nfor {len(missing)} " + f"structures:\n{missing}\n") + + dataIn = list(zip(comps, names, structs, targetDataStage)) + assert len(dataIn) != 0, "No data was fetched from the OPTIMADE API. Please check both the query and the provider." + shuffle(dataIn) + comps, names, structs, targetDataStage = zip(*dataIn) + + self.comps.extend(comps) + self.names.extend(names) + + print(f"Extracted {len(targetDataStage)} datapoints (composition+structure+target) from the OPTIMADE API.") + self.targetData = np.concatenate((self.targetData, np.array(targetDataStage)), axis=0) + + if verbose: + print("Featurizing the structures...") + + if self.descriptor == "Ward2017": + self.calculator.calculate_Ward2017(structs, mode="parallel", max_workers=parallelWorkers) + self.descriptorData = np.concatenate((self.descriptorData, self.calculator.descriptorData), axis=0) + + elif self.descriptor == "KS2022": + self.calculator.calculate_KS2022(structs, mode="parallel", max_workers=parallelWorkers) + self.descriptorData = np.concatenate((self.descriptorData, self.calculator.descriptorData), axis=0) + + else: + raise NotImplementedError("The descriptor must be either 'Ward2017' or 'KS2022'. 
Others will be added in the future.") + + self.validationLabels = ["Training"]*len(self.descriptorData) + + if verbose: + print("Featurization complete!") + print(f"Current dataset size: " + f"{len(self.names)} with " + f"{len(set(self.names))} unique IDs belonging to " + f"{len(set(self.comps))} unique compositions.\n") + if len(self.names) > len(set(self.names)): + print("Please note that there are duplicate IDs in the dataset. Such degenerate dataset can be used " + "without issues for training (in some occasions may be even desirable to bias the model to areas " + "matching multiple criteria), but the validation error may be underestimated since some data" + "may be in both training and validation set.") + + +if __name__ == '__main__': + pass diff --git a/pysipfenn/tests/testCaseFiles/AdjusterTestDescriptors.csv b/pysipfenn/tests/testCaseFiles/AdjusterTestDescriptors.csv new file mode 100644 index 0000000..461fce6 --- /dev/null +++ b/pysipfenn/tests/testCaseFiles/AdjusterTestDescriptors.csv @@ -0,0 +1,53 @@ +Name,mean_EffectiveCoordination,var_EffectiveCoordination,min_EffectiveCoordination,max_EffectiveCoordination,var_MeanBondLength,min_MeanBondLength,max_MeanBondLength,mean_BondLengthVariation,var_BondLengthVariation,min_BondLengthVariation,max_BondLengthVariation,var_CellVolume,MaxPackingEfficiency,mean_NeighDiff_shell1_Number,var_NeighDiff_shell1_Number,min_NeighDiff_shell1_Number,max_NeighDiff_shell1_Number,range_NeighDiff_shell1_Number,mean_NeighDiff_shell1_MendeleevNumber,var_NeighDiff_shell1_MendeleevNumber,min_NeighDiff_shell1_MendeleevNumber,max_NeighDiff_shell1_MendeleevNumber,range_NeighDiff_shell1_MendeleevNumber,mean_NeighDiff_shell1_AtomicWeight,var_NeighDiff_shell1_AtomicWeight,min_NeighDiff_shell1_AtomicWeight,max_NeighDiff_shell1_AtomicWeight,range_NeighDiff_shell1_AtomicWeight,mean_NeighDiff_shell1_MeltingT,var_NeighDiff_shell1_MeltingT,min_NeighDiff_shell1_MeltingT,max_NeighDiff_shell1_MeltingT,range_NeighDiff_shell1_MeltingT,mean_NeighDiff_shell1_Column,var_NeighDiff_shell1_Column,min_NeighDiff_shell1_Column,max_NeighDiff_shell1_Column,range_NeighDiff_shell1_Column,mean_NeighDiff_shell1_Row,var_NeighDiff_shell1_Row,min_NeighDiff_shell1_Row,max_NeighDiff_shell1_Row,range_NeighDiff_shell1_Row,mean_NeighDiff_shell1_CovalentRadius,var_NeighDiff_shell1_CovalentRadius,min_NeighDiff_shell1_CovalentRadius,max_NeighDiff_shell1_CovalentRadius,range_NeighDiff_shell1_CovalentRadius,mean_NeighDiff_shell1_Electronegativity,var_NeighDiff_shell1_Electronegativity,min_NeighDiff_shell1_Electronegativity,max_NeighDiff_shell1_Electronegativity,range_NeighDiff_shell1_Electronegativity,mean_NeighDiff_shell1_NsValence,var_NeighDiff_shell1_NsValence,min_NeighDiff_shell1_NsValence,max_NeighDiff_shell1_NsValence,range_NeighDiff_shell1_NsValence,mean_NeighDiff_shell1_NpValence,var_NeighDiff_shell1_NpValence,min_NeighDiff_shell1_NpValence,max_NeighDiff_shell1_NpValence,range_NeighDiff_shell1_NpValence,mean_NeighDiff_shell1_NdValence,var_NeighDiff_shell1_NdValence,min_NeighDiff_shell1_NdValence,max_NeighDiff_shell1_NdValence,range_NeighDiff_shell1_NdValence,mean_NeighDiff_shell1_NfValence,var_NeighDiff_shell1_NfValence,min_NeighDiff_shell1_NfValence,max_NeighDiff_shell1_NfValence,range_NeighDiff_shell1_NfValence,mean_NeighDiff_shell1_NValance,var_NeighDiff_shell1_NValance,min_NeighDiff_shell1_NValance,max_NeighDiff_shell1_NValance,range_NeighDiff_shell1_NValance,mean_NeighDiff_shell1_NsUnfilled,var_NeighDiff_shell1_NsUnfilled,min_NeighDiff_shell1_NsUnfilled,max_NeighDiff_shell1_NsUnfilled,rang
e_NeighDiff_shell1_NsUnfilled,mean_NeighDiff_shell1_NpUnfilled,var_NeighDiff_shell1_NpUnfilled,min_NeighDiff_shell1_NpUnfilled,max_NeighDiff_shell1_NpUnfilled,range_NeighDiff_shell1_NpUnfilled,mean_NeighDiff_shell1_NdUnfilled,var_NeighDiff_shell1_NdUnfilled,min_NeighDiff_shell1_NdUnfilled,max_NeighDiff_shell1_NdUnfilled,range_NeighDiff_shell1_NdUnfilled,mean_NeighDiff_shell1_NfUnfilled,var_NeighDiff_shell1_NfUnfilled,min_NeighDiff_shell1_NfUnfilled,max_NeighDiff_shell1_NfUnfilled,range_NeighDiff_shell1_NfUnfilled,mean_NeighDiff_shell1_NUnfilled,var_NeighDiff_shell1_NUnfilled,min_NeighDiff_shell1_NUnfilled,max_NeighDiff_shell1_NUnfilled,range_NeighDiff_shell1_NUnfilled,mean_NeighDiff_shell1_GSvolume_pa,var_NeighDiff_shell1_GSvolume_pa,min_NeighDiff_shell1_GSvolume_pa,max_NeighDiff_shell1_GSvolume_pa,range_NeighDiff_shell1_GSvolume_pa,mean_NeighDiff_shell1_GSbandgap,var_NeighDiff_shell1_GSbandgap,min_NeighDiff_shell1_GSbandgap,max_NeighDiff_shell1_GSbandgap,range_NeighDiff_shell1_GSbandgap,mean_NeighDiff_shell1_GSmagmom,var_NeighDiff_shell1_GSmagmom,min_NeighDiff_shell1_GSmagmom,max_NeighDiff_shell1_GSmagmom,range_NeighDiff_shell1_GSmagmom,NComp,Comp_L2Norm,Comp_L3Norm,Comp_L5Norm,Comp_L7Norm,Comp_L10Norm,mean_Number,maxdiff_Number,dev_Number,max_Number,min_Number,most_Number,mean_MendeleevNumber,maxdiff_MendeleevNumber,dev_MendeleevNumber,max_MendeleevNumber,min_MendeleevNumber,most_MendeleevNumber,mean_AtomicWeight,maxdiff_AtomicWeight,dev_AtomicWeight,max_AtomicWeight,min_AtomicWeight,most_AtomicWeight,mean_MeltingT,maxdiff_MeltingT,dev_MeltingT,max_MeltingT,min_MeltingT,most_MeltingT,mean_Column,maxdiff_Column,dev_Column,max_Column,min_Column,most_Column,mean_Row,maxdiff_Row,dev_Row,max_Row,min_Row,most_Row,mean_CovalentRadius,maxdiff_CovalentRadius,dev_CovalentRadius,max_CovalentRadius,min_CovalentRadius,most_CovalentRadius,mean_Electronegativity,maxdiff_Electronegativity,dev_Electronegativity,max_Electronegativity,min_Electronegativity,most_Electronegativity,mean_NsValence,maxdiff_NsValence,dev_NsValence,max_NsValence,min_NsValence,most_NsValence,mean_NpValence,maxdiff_NpValence,dev_NpValence,max_NpValence,min_NpValence,most_NpValence,mean_NdValence,maxdiff_NdValence,dev_NdValence,max_NdValence,min_NdValence,most_NdValence,mean_NfValence,maxdiff_NfValence,dev_NfValence,max_NfValence,min_NfValence,most_NfValence,mean_NValance,maxdiff_NValance,dev_NValance,max_NValance,min_NValance,most_NValance,mean_NsUnfilled,maxdiff_NsUnfilled,dev_NsUnfilled,max_NsUnfilled,min_NsUnfilled,most_NsUnfilled,mean_NpUnfilled,maxdiff_NpUnfilled,dev_NpUnfilled,max_NpUnfilled,min_NpUnfilled,most_NpUnfilled,mean_NdUnfilled,maxdiff_NdUnfilled,dev_NdUnfilled,max_NdUnfilled,min_NdUnfilled,most_NdUnfilled,mean_NfUnfilled,maxdiff_NfUnfilled,dev_NfUnfilled,max_NfUnfilled,min_NfUnfilled,most_NfUnfilled,mean_NUnfilled,maxdiff_NUnfilled,dev_NUnfilled,max_NUnfilled,min_NUnfilled,most_NUnfilled,mean_GSvolume_pa,maxdiff_GSvolume_pa,dev_GSvolume_pa,max_GSvolume_pa,min_GSvolume_pa,most_GSvolume_pa,mean_GSbandgap,maxdiff_GSbandgap,dev_GSbandgap,max_GSbandgap,min_GSbandgap,most_GSbandgap,mean_GSmagmom,maxdiff_GSmagmom,dev_GSmagmom,max_GSmagmom,min_GSmagmom,most_GSmagmom,frac_sValence,frac_pValence,frac_dValence,frac_fValence,MaxIonicChar,MeanIonicChar 
+1,12.351999,0.9712921,8.222386,13.209694,0.04054043,0.8785563,1.0439281,0.04636157,0.0106930975,0.032681856,0.06346836,0.10062764,0.5103989,15.9319,6.1703825,7.6254954,46.0,38.374504,2.9644692,1.1850718,1.3918251,10.0,8.608175,43.3118,16.712784,20.775595,122.645,101.86941,219.96661,86.728165,104.15781,695.0,590.84216,1.1857877,0.47402874,0.55673003,4.0,3.44327,0.59289384,0.23701437,0.27836502,2.0,1.721635,12.583264,5.0444484,5.897484,43.0,37.102516,0.35222092,0.123928234,0.17773023,0.5821892,0.404459,0.32790598,0.1609134,0.0,0.6769642,0.6769642,0.0,0.0,0.0,0.0,0.0,1.5136937,0.57675046,0.7314586,4.0,3.2685413,6.4455986,2.3781526,3.1716552,14.0,10.828344,5.259811,1.9041239,2.6149251,10.0,7.3850746,0.32790598,0.1609134,0.0,0.6769642,0.6769642,0.0,0.0,0.0,0.0,0.0,1.5136937,0.57675046,0.7314586,4.0,3.2685413,0.0,0.0,0.0,0.0,0.0,1.1857877,0.47402874,0.55673003,4.0,3.44327,3.6543732,1.4388868,1.731838,11.47,9.738162,0.0,0.0,0.0,0.0,0.0,0.27965,0.26157326,0.0,2.1106627,2.1106627,3.0,0.70710677,0.66142225,0.6450736,0.6431713,0.64287645,60.142857,46.0,15.244898,72.0,26.0,72.0,47.142857,10.0,2.7551022,55.0,45.0,45.0,146.14964,122.645,41.58046,178.49,55.845,178.49,2567.7856,1085.0,187.55103,2896.0,1811.0,2506.0,4.857143,4.0,1.1020408,8.0,4.0,4.0,5.571429,2.0,0.5510204,6.0,4.0,6.0,165.92857,43.0,11.663265,175.0,132.0,175.0,1.5835714,0.86,0.36459184,2.16,1.3,1.3,1.7142857,1.0,0.40816328,2.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,3.142857,4.0,1.4693878,6.0,2.0,2.0,9.0,14.0,6.428571,14.0,0.0,14.0,13.857142,12.0,5.3265305,18.0,6.0,18.0,0.2857143,1.0,0.40816328,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.857143,4.0,1.4693878,8.0,4.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,7.142857,4.0,1.1020408,8.0,4.0,8.0,19.520714,11.47,3.4447958,22.2,10.73,22.2,0.0,0.0,0.0,0.0,0.0,0.0,0.15076163,2.1106627,0.27998587,2.1106627,0.0,0.0,0.12371134,0.0,0.22680412,0.6494845,0.1688126,0.069337025 
+2,10.749268,1.3515297,9.133969,12.937536,0.036102533,0.92929864,1.0435873,0.08027686,0.032956295,0.0288808,0.12060537,0.0804736,0.5287413,9.149375,3.936822,4.1123157,20.29919,16.186874,21.695059,8.394964,11.879232,36.154922,24.27569,25.168425,10.838523,11.446098,54.908405,43.462307,1041.7916,408.17728,540.7788,1850.7782,1309.9994,5.7968817,2.250725,3.12517,9.972692,6.847522,0.30541304,0.2174865,0.051313564,0.9459963,0.89468277,15.057099,6.0922184,6.952574,33.333755,26.381182,0.26249614,0.15076552,0.0989608,0.7147927,0.6158319,0.3039668,0.2221015,0.04814083,0.9702713,0.92213047,1.7286851,0.6643564,0.9482125,2.7568731,1.8086606,3.797381,1.4937445,1.9226664,7.327266,5.4045997,4.2757826,3.044811,0.71838987,13.243948,12.525558,3.6519403,1.7448139,1.4768864,8.886382,7.409496,0.3039668,0.2221015,0.04814083,0.9702713,0.92213047,1.7286851,0.6643564,0.9482125,2.7568731,1.8086606,3.797381,1.4937445,1.9226664,7.327266,5.4045997,0.0,0.0,0.0,0.0,0.0,2.3395112,0.92201227,1.1733427,4.585559,3.4122162,7.372321,2.9635305,3.5320344,14.917911,11.385876,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.70710677,0.67354,0.66692686,0.6666783,0.6666668,53.0,30.0,6.3333335,72.0,42.0,51.0,72.5,40.0,16.666666,85.0,45.0,85.0,126.915,82.53,17.191668,178.49,95.96,121.76,1502.8534,1992.22,798.76447,2896.0,903.78,903.78,11.666667,11.0,4.4444447,15.0,4.0,15.0,5.1666665,1.0,0.2777778,6.0,5.0,5.0,147.5,36.0,11.333333,175.0,139.0,139.0,1.9433334,0.86,0.21444444,2.16,1.3,2.05,1.8333334,1.0,0.2777778,2.0,1.0,2.0,2.0,3.0,1.3333334,3.0,0.0,3.0,7.8333335,8.0,2.8888888,10.0,2.0,10.0,2.3333333,14.0,3.8888888,14.0,0.0,0.0,14.0,12.0,2.6666667,18.0,6.0,15.0,0.16666667,1.0,0.2777778,1.0,0.0,0.0,2.0,3.0,1.3333334,3.0,0.0,3.0,2.1666667,8.0,2.8888888,8.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.3333335,5.0,1.7777778,8.0,3.0,3.0,27.355,15.87,5.6066666,31.56,15.69,31.56,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.13095239,0.14285715,0.5595238,0.16666667,0.1688126,0.039201893 
+3,13.123931,1.9172075,11.686025,15.9997425,0.04540433,0.96594673,1.0681118,0.05769276,0.027517905,0.016413225,0.07833119,0.09487847,0.66863126,11.116471,6.460018,6.6886334,30.496525,23.807892,2.9937696,0.9260386,2.2992406,4.7587056,2.459465,30.315308,17.796822,18.240318,83.70577,65.465454,337.73193,127.578674,242.04791,671.179,429.1311,1.0585506,0.29661858,0.8360867,1.5034822,0.66739553,0.3473895,0.21753684,0.20901953,1.0,0.79098046,11.114781,3.114495,8.778911,15.786563,7.0076528,0.4497202,0.1286315,0.35324657,0.65394205,0.30069545,0.5292753,0.14830929,0.41804335,0.7517411,0.33369777,0.0,0.0,0.0,0.0,0.0,1.5878259,0.44492784,1.25413,2.2552233,1.0010933,4.863453,3.0455158,2.9262733,14.0,11.073727,4.86346,2.582904,2.926282,12.496525,9.570243,0.5292753,0.14830929,0.41804335,0.7517411,0.33369777,0.0,0.0,0.0,0.0,0.0,1.5878259,0.44492784,1.25413,2.2552233,1.0010933,0.0,0.0,0.0,0.0,0.0,1.0585506,0.29661858,0.8360867,1.5034822,0.66739553,3.7912383,1.1490632,2.929441,5.8888345,2.9593937,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.70710677,0.67354,0.66692686,0.6666783,0.6666668,46.666668,32.0,8.444445,72.0,40.0,42.0,48.166668,6.0,2.4444444,50.0,44.0,50.0,108.92567,87.266,23.18811,178.49,91.224,95.96,2703.0,768.0,257.33334,2896.0,2128.0,2896.0,5.3333335,2.0,0.8888889,6.0,4.0,6.0,5.1666665,1.0,0.2777778,6.0,5.0,5.0,161.0,21.0,9.333333,175.0,154.0,154.0,1.8783333,0.86,0.37555555,2.16,1.3,2.16,1.3333334,1.0,0.44444445,2.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,3.0,1.3333334,5.0,2.0,5.0,2.3333333,14.0,3.8888888,14.0,0.0,0.0,7.6666665,14.0,3.4444444,18.0,4.0,6.0,0.6666667,1.0,0.44444445,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,3.0,1.3333334,8.0,5.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,6.6666665,2.0,0.8888889,8.0,6.0,6.0,18.025833,7.505,3.1144445,23.195,15.69,15.69,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.17391305,0.0,0.5217391,0.3043478,0.1688126,0.07268424 
+4,12.325356,0.8919849,8.54982,13.099489,0.036544494,0.9055019,1.0362617,0.04900315,0.0052627237,0.04136948,0.05512055,0.09215145,0.53715074,18.460775,8.79385,9.301567,58.0,48.698433,6.224701,3.8250427,2.8233562,33.0,30.176643,49.550694,23.589897,24.993631,150.4045,125.410866,248.75926,118.59482,125.14537,819.0,693.8546,2.0581677,1.1345475,1.0187718,10.0,8.981228,0.7592634,0.3632783,0.37938565,3.0,2.6206143,16.079443,7.69461,8.032099,64.0,55.9679,0.38584435,0.19645673,0.19757147,0.73960054,0.5420291,0.3545328,0.24664883,0.0,0.8600006,0.8600006,0.2698204,0.24716851,0.0,2.0,2.0,1.3334188,0.68098927,0.6831563,2.5800018,1.8968456,6.8522024,3.394916,3.4913964,14.0,10.508603,6.1431365,3.0029135,3.1226258,14.0,10.877375,0.3545328,0.24664883,0.0,0.8600006,0.8600006,0.5396408,0.49433702,0.0,4.0,4.0,2.14288,1.0240693,1.0731568,8.0,6.926843,0.0,0.0,0.0,0.0,0.0,1.2487065,0.5950254,0.6287711,4.0,3.371229,2.5454507,1.4700028,1.3147489,5.598604,4.2838554,0.10428558,0.09553063,0.0,0.773,0.773,0.0,0.0,0.0,0.0,0.0,3.0,0.7491492,0.7208911,0.71463394,0.714308,0.71428615,61.42857,58.0,15.102041,72.0,14.0,72.0,48.42857,33.0,4.897959,78.0,45.0,45.0,150.06183,150.4045,40.611683,178.49,28.0855,178.49,2531.0715,1209.0,156.39796,2896.0,1687.0,2506.0,5.142857,10.0,1.6326531,14.0,4.0,4.0,5.571429,3.0,0.6122449,6.0,3.0,6.0,165.92857,64.0,12.959184,175.0,111.0,175.0,1.5271429,0.86,0.3244898,2.16,1.3,1.3,1.7857143,1.0,0.33673468,2.0,1.0,2.0,0.14285715,2.0,0.26530612,2.0,0.0,0.0,2.5,5.0,1.0714285,5.0,0.0,2.0,10.0,14.0,5.714286,14.0,0.0,14.0,14.428572,14.0,5.102041,18.0,4.0,18.0,0.21428572,1.0,0.33673468,1.0,0.0,0.0,0.2857143,4.0,0.53061223,4.0,0.0,0.0,6.785714,8.0,1.7346939,8.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,7.285714,4.0,1.0204082,8.0,4.0,8.0,20.679285,6.51,2.1724489,22.2,15.69,22.2,0.055214286,0.773,0.10254081,0.773,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.12376238,0.00990099,0.17326732,0.6930693,0.1688126,0.06097286 
[Diff continues with added rows 5–40 of a CSV data file: each "+"-prefixed line is a row index followed by several hundred comma-separated numerical feature values.]
+41,4.3095846,0.19187064,3.9310725,4.693326,0.04466702,0.910666,1.084322,0.6548422,0.027482755,0.6004946,0.7098077,0.061434,0.033792518,26.402414,2.765024,23.650137,31.93246,8.282325,12.451513,4.3147144,8.19597,21.080942,12.884973,71.481255,7.0552497,64.45291,85.59176,21.138842,1037.7344,367.10724,779.5957,1771.9489,992.35315,4.2228537,1.3631679,2.877985,6.949189,4.071204,1.1642373,0.22818665,0.93848896,1.6206106,0.68212163,29.658525,7.285605,22.461037,44.229736,21.768698,0.4757437,0.12188433,0.36178696,0.7195124,0.3577254,0.45886043,0.23981425,0.2333735,0.93848896,0.70511544,0.0,0.0,0.0,0.0,0.0,4.4399014,1.1379572,3.3159704,6.715816,3.3998456,9.668996,0.020832025,9.635321,9.71066,0.075339176,6.897018,1.4316695,5.561952,9.760357,4.1984043,0.45886043,0.23981425,0.2333735,0.93848896,0.70511544,0.0,0.0,0.0,0.0,0.0,4.4399014,1.1379572,3.3159704,6.715816,3.3998456,0.0,0.0,0.0,0.0,0.0,4.2228537,1.3631679,2.877985,6.949189,4.071204,5.315402,0.40187556,4.9133596,6.119153,1.2057934,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.61237246,0.5386087,0.50609934,0.50110865,0.5000976,54.0,42.0,18.0,72.0,30.0,72.0,52.25,24.0,8.375,69.0,45.0,45.0,129.58,113.11,48.91,178.49,65.38,178.49,2150.17,2203.32,728.745,2896.0,692.68,2506.0,6.5,8.0,2.75,12.0,4.0,4.0,5.25,2.0,0.75,6.0,4.0,6.0,156.5,53.0,18.5,175.0,122.0,175.0,1.6025,0.86,0.3025,2.16,1.3,1.3,1.75,1.0,0.375,2.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,4.75,8.0,2.75,10.0,2.0,2.0,7.0,14.0,7.0,14.0,0.0,14.0,13.5,12.0,4.5,18.0,6.0,18.0,0.25,1.0,0.375,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.25,8.0,2.75,8.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,5.5,8.0,2.75,8.0,0.0,8.0,18.5125,8.24,3.6875,22.2,13.96,22.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.12962963,0.0,0.35185185,0.5185185,0.1688126,0.057612848 +42,13.054688,1.9563351,11.525344,15.989727,0.044689585,0.9623572,1.0671638,0.05673858,0.026338238,0.016542196,0.079198815,0.09242937,0.6303787,29.214659,5.5643272,18.046324,36.126934,18.08061,13.221662,4.0913754,6.5202475,20.694883,14.174636,75.403725,14.004467,47.557148,92.91128,45.35413,901.03046,285.21744,541.1646,1420.0541,878.8895,4.277163,1.2366564,2.1827288,6.5009513,4.3182225,1.4489213,0.34567338,0.8013392,2.0388834,1.2375443,26.289385,5.623212,15.087973,35.547,20.459028,0.41516387,0.08817152,0.23503147,0.6782832,0.44325176,0.4564688,0.16700858,0.19331822,1.0,0.8066818,0.4597735,0.18662874,0.19308665,0.8077282,0.6146415,2.2889535,0.47370404,1.7969552,4.172366,2.375411,7.4112396,2.0902438,5.7934365,10.551702,4.7582645,7.7318115,1.631518,5.5557184,10.379906,4.8241878,0.4564688,0.16700858,0.19331822,1.0,0.8066818,2.2988675,0.93314373,0.9654333,4.038641,3.0732074,3.8869903,0.85525024,2.2106676,5.3089223,3.098255,0.0,0.0,0.0,0.0,0.0,1.518522,0.29058662,1.0226281,1.9525828,0.92995477,3.388631,0.8103776,2.527563,4.6535597,2.1259966,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.57735026,0.48074985,0.41524366,0.38997695,0.37204105,42.333332,59.0,19.777779,72.0,13.0,42.333332,56.0,28.0,11.333333,73.0,45.0,56.0,100.47718,151.50847,52.00855,178.49,26.981539,100.47718,2111.8232,1962.53,785.5689,2896.0,933.47,2111.8232,7.6666665,9.0,3.5555556,13.0,4.0,7.6666665,4.6666665,3.0,1.1111112,6.0,3.0,4.6666665,150.0,54.0,19.333334,175.0,121.0,150.0,1.69,0.86,0.31333333,2.16,1.3,1.69,1.6666666,1.0,0.44444445,2.0,1.0,1.6666666,0.33333334,1.0,0.44444445,1.0,0.0,0.33333334,2.3333333,5.0,1.7777778,5.0,0.0,2.3333333,4.6666665,14.0,6.2222223,14.0,0.0,4.6666665,9.0,15.0,6.0,18.0,3.0,9.0,0.33333334,1.0,0.44444445,1.0,0.0,0.33333334,1.6666666,5.0,2.2222223,5.0,0.0,1.6666666,4.3333335,8.0,2.888888
8,8.0,0.0,4.3333335,0.0,0.0,0.0,0.0,0.0,0.0,6.3333335,3.0,1.1111112,8.0,5.0,6.3333335,18.123333,6.51,2.7177777,22.2,15.69,18.123333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.18518518,0.037037037,0.25925925,0.5185185,0.1688126,0.058974992 +43,11.920832,0.027244095,11.886776,11.948261,0.009461029,0.9881737,1.0198349,0.052121975,0.00026566672,0.05156218,0.052454058,0.02762,0.61685055,6.336381,5.0691047,0.0,15.56413,15.56413,1.0560635,0.8448508,0.0,2.5940218,2.5940218,17.431383,13.945107,0.0,42.81692,42.81692,82.37295,65.89836,0.0,202.3337,202.3337,0.4224254,0.3379403,0.0,1.0376086,1.0376086,0.2112127,0.16897015,0.0,0.5188043,0.5188043,4.435467,3.5483732,0.0,10.894891,10.894891,0.18164292,0.14531434,0.0,0.44617173,0.44617173,0.2112127,0.16897015,0.0,0.5188043,0.5188043,0.0,0.0,0.0,0.0,0.0,0.6336381,0.50691044,0.0,1.556413,1.556413,2.9569778,2.3655822,0.0,7.263261,7.263261,2.5345523,2.0276418,0.0,6.225652,6.225652,0.2112127,0.16897015,0.0,0.5188043,0.5188043,0.0,0.0,0.0,0.0,0.0,0.6336381,0.50691044,0.0,1.556413,1.556413,0.0,0.0,0.0,0.0,0.0,0.4224254,0.3379403,0.0,1.0376086,1.0376086,1.3749946,1.0999957,0.0,3.3774161,3.3774161,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.82462114,0.80414516,0.8001562,0.800007,0.8000001,48.0,30.0,9.6,72.0,42.0,42.0,49.0,5.0,1.6,50.0,45.0,50.0,112.466,82.53,26.4096,178.49,95.96,95.96,2818.0,390.0,124.8,2896.0,2506.0,2896.0,5.6,2.0,0.64,6.0,4.0,6.0,5.2,1.0,0.32,6.0,5.0,5.0,158.2,21.0,6.72,175.0,154.0,154.0,1.988,0.86,0.2752,2.16,1.3,2.16,1.2,1.0,0.32,2.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,4.4,3.0,0.96,5.0,2.0,5.0,2.8,14.0,4.48,14.0,0.0,0.0,8.4,12.0,3.84,18.0,6.0,6.0,0.8,1.0,0.32,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,5.6,3.0,0.96,8.0,5.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,6.4,2.0,0.64,8.0,6.0,6.0,16.992,6.51,2.0832,22.2,15.69,15.69,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.14285715,0.0,0.52380955,0.33333334,0.1688126,0.054020032 
+44,11.956922,2.220446e-15,11.956922,11.956922,1.5081874e-16,1.0,1.0,0.051981524,6.7654216e-17,0.051981524,0.051981524,5.0056818e-17,0.68017477,18.0,6.6720276,12.720277,31.344055,18.623777,10.379953,3.25,6.7920737,16.879953,10.08788,49.562,18.342606,35.081482,86.24721,51.165726,427.3,95.71915,331.58084,551.9636,220.38275,3.5519814,1.0,2.4480186,5.5519814,3.1039631,0.5,0.25,0.22400923,1.0,0.7759908,25.295807,4.5,20.327972,34.295807,13.9678335,0.7273521,0.03,0.6942332,0.7873521,0.09311889,0.7759908,2.7755576e-17,0.7759908,0.7759908,1.110223e-16,0.0,0.0,0.0,0.0,0.0,4.3279724,1.0,3.2240093,6.3279724,3.1039631,7.0,3.5,3.1361294,14.0,10.863871,9.0,2.1720278,6.9839168,13.344055,6.3601384,0.7759908,2.7755576e-17,0.7759908,0.7759908,1.110223e-16,0.0,0.0,0.0,0.0,0.0,4.3279724,1.0,3.2240093,6.3279724,3.1039631,0.0,0.0,0.0,0.0,0.0,3.5519814,1.0,2.4480186,5.5519814,3.1039631,5.3866997,0.1675,5.201786,5.7216997,0.5199138,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.61237246,0.5386087,0.50609934,0.50110865,0.5000976,66.0,36.0,12.0,78.0,42.0,72.0,50.75,18.0,6.125,63.0,45.0,45.0,162.006,99.124,33.023,195.084,95.96,178.49,2487.35,854.6,222.975,2896.0,2041.4,2506.0,6.0,6.0,2.0,10.0,4.0,4.0,5.75,1.0,0.375,6.0,5.0,6.0,160.0,39.0,15.0,175.0,136.0,175.0,1.76,0.98,0.46,2.28,1.3,1.3,1.5,1.0,0.5,2.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,4.5,7.0,2.5,9.0,2.0,2.0,10.5,14.0,5.25,14.0,0.0,14.0,16.5,18.0,5.25,24.0,6.0,18.0,0.5,1.0,0.5,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.5,7.0,2.5,8.0,1.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,6.0,2.0,8.0,2.0,8.0,18.7775,7.18,3.4225,22.2,15.02,22.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.09090909,0.0,0.27272728,0.6363636,0.2134508,0.09601504 +45,11.956922,3.1870506e-11,11.956922,11.956922,4.861483e-13,1.0,1.0,0.051981524,3.494583e-13,0.051981524,0.051981524,2.531407e-12,0.68016917,17.5,6.8100233,11.720277,31.120047,19.399769,8.879953,2.5,6.120046,13.879953,7.7599077,48.1285,18.738237,32.21448,85.60497,53.39049,259.30585,39.25,215.9753,337.80585,121.83055,3.0519814,0.75,2.2240093,4.5519814,2.3279722,0.5,0.25,0.22400923,1.0,0.7759908,22.795807,3.25,19.207926,29.295807,10.08788,0.68735206,0.01,0.67631245,0.70735204,0.031039631,0.5,0.25,0.22400923,1.0,0.7759908,0.0,0.0,0.0,0.0,0.0,3.3279722,0.5,2.7759907,4.3279724,1.5519816,7.0,3.5,3.1361294,14.0,10.863871,8.5,2.310023,6.5959215,13.120047,6.5241246,0.5,0.25,0.22400923,1.0,0.7759908,0.0,0.0,0.0,0.0,0.0,3.3279722,0.5,2.7759907,4.3279724,1.5519816,0.0,0.0,0.0,0.0,0.0,3.0519814,0.75,2.2240093,4.5519814,2.3279722,5.7917,0.37,5.3832335,6.5316997,1.1484663,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.61237246,0.5386087,0.50609934,0.50110865,0.5000976,65.75,35.0,11.875,77.0,42.0,72.0,50.0,15.0,5.0,60.0,45.0,45.0,161.28925,96.257,32.664623,192.217,95.96,178.49,2661.75,390.0,155.75,2896.0,2506.0,2506.0,5.75,5.0,1.75,9.0,4.0,4.0,5.75,1.0,0.375,6.0,5.0,6.0,161.25,34.0,13.75,175.0,141.0,175.0,1.74,0.9,0.44,2.2,1.3,1.3,1.75,1.0,0.375,2.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,5.0,2.0,7.0,2.0,2.0,10.5,14.0,5.25,14.0,0.0,14.0,16.25,17.0,5.125,23.0,6.0,18.0,0.25,1.0,0.375,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,5.0,2.0,8.0,3.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,6.25,5.0,1.75,8.0,3.0,8.0,18.575,7.99,3.625,22.2,14.21,22.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.10769231,0.0,0.24615385,0.64615387,0.18331352,0.08808152 
+46,10.327522,1.24156,8.845877,12.640654,0.034104243,0.9558891,1.0729406,0.06802664,0.017378658,0.043601986,0.099896014,0.06642596,0.5539466,34.331936,7.4495535,23.558552,45.50283,21.944277,23.966583,2.7582083,19.339941,27.549467,8.209527,87.28994,20.205551,58.762962,119.029236,60.266277,862.51514,101.02814,635.2018,1005.03186,369.83008,7.0845175,0.79586893,5.622123,8.111312,2.4891891,1.9784365,0.28448448,1.4899029,2.2378588,0.747956,42.32908,6.0308986,31.94854,47.903934,15.955395,0.3968551,0.08608544,0.3005118,0.5905473,0.2900355,0.56956375,0.21841064,0.21800649,0.8676674,0.6496609,1.5638223,0.21135366,1.3211584,1.8177971,0.49663863,3.2725136,0.5975097,1.9975586,4.0328937,2.0353353,5.8045983,3.065094,2.138101,12.456864,10.318764,6.539192,2.6901875,3.2625499,12.020852,8.758302,0.56956375,0.21841064,0.21800649,0.8676674,0.6496609,3.1276445,0.42270732,2.6423168,3.6355941,0.99327725,5.1533985,0.8142156,3.8091295,6.028176,2.2190466,0.0,0.0,0.0,0.0,0.0,2.3930507,0.50083447,1.6586473,3.1230912,1.4644439,3.4351487,0.6521244,2.6015368,4.3902097,1.7886732,0.6044173,0.08168819,0.51062775,0.70257854,0.19195083,0.0,0.0,0.0,0.0,0.0,3.0,0.59835166,0.5140072,0.46610025,0.45291558,0.4469273,36.22222,58.0,19.753086,72.0,14.0,14.0,61.333332,33.0,14.814815,78.0,45.0,78.0,84.13355,150.4045,49.820496,178.49,28.0855,28.0855,2272.0,1209.0,520.0,2896.0,1687.0,1687.0,9.111111,10.0,4.345679,14.0,4.0,14.0,4.3333335,3.0,1.1851852,6.0,3.0,3.0,139.55556,64.0,25.382715,175.0,111.0,111.0,1.8533334,0.86,0.24592593,2.16,1.3,1.9,1.6666666,1.0,0.44444445,2.0,1.0,2.0,0.8888889,2.0,0.9876543,2.0,0.0,2.0,2.1111112,5.0,1.925926,5.0,0.0,0.0,3.1111112,14.0,4.839506,14.0,0.0,0.0,7.7777777,14.0,4.54321,18.0,4.0,4.0,0.33333334,1.0,0.44444445,1.0,0.0,0.0,1.7777778,4.0,1.9753087,4.0,0.0,4.0,3.4444444,8.0,3.0617285,8.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.5555553,4.0,1.3827161,8.0,4.0,4.0,19.247778,6.51,2.371852,22.2,15.69,20.44,0.34355557,0.773,0.38172838,0.773,0.0,0.773,0.0,0.0,0.0,0.0,0.0,0.0,0.21428572,0.114285715,0.27142859,0.4,0.1688126,0.046975855 
+47,12.30493,0.8297993,8.639354,13.227287,0.038414974,0.90544367,1.0556711,0.048244882,0.0075762477,0.03464853,0.05968119,0.09797196,0.54413664,17.191843,4.2939653,8.275771,40.0,31.724228,6.562156,3.8643272,2.2538614,34.0,31.74614,46.72806,11.548304,22.442858,105.85,83.40714,336.408,149.23105,165.63754,1355.0583,1189.4208,2.1379604,1.1010026,0.9015445,10.0,9.098455,0.6632285,0.19794282,0.32737687,2.0,1.6726232,15.686055,5.6059265,7.8795123,55.0,47.120487,0.43690765,0.0994234,0.2034946,0.71,0.5065054,0.4136221,0.14186679,0.0,0.7784862,0.7784862,0.27050114,0.24707127,0.0,2.0,2.0,2.2601867,0.8171375,1.1366842,8.0,6.863316,7.3916907,1.7101642,3.5014017,14.0,10.498598,5.4208884,1.3352299,2.3829875,9.066745,6.6837573,0.4136221,0.14186679,0.0,0.7784862,0.7784862,0.5410023,0.49414253,0.0,4.0,4.0,2.2601867,0.8171375,1.1366842,8.0,6.863316,0.0,0.0,0.0,0.0,0.0,1.326457,0.39588565,0.65475374,4.0,3.3452463,2.8015566,0.8488463,0.805,5.1233068,4.3183064,0.051800966,0.04731415,0.0,0.383,0.383,0.0,0.0,0.0,0.0,0.0,3.0,0.6776309,0.6149109,0.5819375,0.5744222,0.57194614,58.42857,40.0,15.510204,72.0,32.0,72.0,49.214287,34.0,4.8163266,79.0,45.0,45.0,141.45428,105.85,42.32653,178.49,72.64,178.49,2552.8142,1684.6,245.13266,2896.0,1211.4,2506.0,5.428571,10.0,1.6326531,14.0,4.0,4.0,5.5,2.0,0.5714286,6.0,4.0,6.0,163.57143,55.0,13.061225,175.0,120.0,175.0,1.6578572,0.86,0.4089796,2.16,1.3,1.3,1.6428572,1.0,0.45918366,2.0,1.0,2.0,0.14285715,2.0,0.26530612,2.0,0.0,0.0,3.642857,8.0,1.8775511,10.0,2.0,2.0,8.0,14.0,6.857143,14.0,0.0,14.0,13.428572,12.0,5.3061223,18.0,6.0,18.0,0.35714287,1.0,0.45918366,1.0,0.0,0.0,0.2857143,4.0,0.53061223,4.0,0.0,0.0,6.357143,8.0,1.8775511,8.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,4.0,1.1428572,8.0,4.0,8.0,19.9325,7.315,3.0303571,23.005,15.69,22.2,0.027357142,0.383,0.050806124,0.383,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.122340426,0.010638298,0.2712766,0.59574467,0.1688126,0.07885518 
+48,9.314539,0.87635905,8.0,9.9718075,0.037257105,0.9441143,1.0279428,0.046325535,0.030883688,8.0676685e-17,0.0694883,0.104166664,0.51013106,24.051508,6.6323285,17.373068,34.0,16.626932,30.048199,7.3880296,24.5,41.13024,16.630243,62.70358,18.88428,44.918957,91.03,46.111042,1660.5438,383.5315,1201.0,2235.841,1034.8411,8.147903,1.802796,6.591612,10.852097,4.260485,1.3086828,0.20578855,1.0,1.5,0.5,36.469463,5.338729,28.46137,42.947018,14.485649,0.8005482,0.16963455,0.5902428,1.055,0.4647572,0.69131714,0.20578855,0.5,1.0,0.5,2.7652686,0.8231542,2.0,4.0,2.0,5.308683,0.77385324,4.147903,6.2781453,2.1302426,8.643119,3.5712538,5.964679,14.0,8.035321,8.147903,1.802796,6.591612,10.852097,4.260485,0.69131714,0.20578855,0.5,1.0,0.5,1.3826343,0.4115771,1.0,2.0,1.0,5.308683,0.77385324,4.147903,6.2781453,2.1302426,0.0,0.0,0.0,0.0,0.0,4.0,0.5680647,3.147903,4.852097,1.704194,7.0721745,1.3047831,5.115,8.6451,3.5300996,0.55236244,0.16442506,0.3995,0.799,0.3995,0.0,0.0,0.0,0.0,0.0,3.0,0.57735026,0.48074985,0.41524366,0.38997695,0.37204105,49.333332,38.0,15.111111,72.0,34.0,49.333332,61.333332,44.0,18.444445,89.0,45.0,61.333332,117.80334,99.53,40.45778,178.49,78.96,117.80334,1965.3334,2402.0,980.8889,2896.0,494.0,1965.3334,8.666667,12.0,4.888889,16.0,4.0,8.666667,5.0,2.0,0.6666667,6.0,4.0,5.0,149.66667,55.0,19.777779,175.0,120.0,149.66667,2.0033333,1.25,0.46888888,2.55,1.3,2.0033333,1.6666666,1.0,0.44444445,2.0,1.0,1.6666666,1.3333334,4.0,1.7777778,4.0,0.0,1.3333334,5.6666665,8.0,2.8888888,10.0,2.0,5.6666665,4.6666665,14.0,6.2222223,14.0,0.0,4.6666665,13.333333,12.0,4.888889,18.0,6.0,13.333333,0.33333334,1.0,0.44444445,1.0,0.0,0.33333334,0.6666667,2.0,0.8888889,2.0,0.0,0.6666667,4.3333335,8.0,2.8888888,8.0,0.0,4.3333335,0.0,0.0,0.0,0.0,0.0,0.0,5.3333335,6.0,2.2222223,8.0,2.0,5.3333335,21.27,10.23,3.72,25.92,15.69,21.27,0.26633334,0.799,0.35511112,0.799,0.0,0.26633334,0.0,0.0,0.0,0.0,0.0,0.0,0.125,0.1,0.425,0.35,0.32336617,0.11766442 
+49,4.1524224,0.1600127,3.8358934,4.472448,0.041532345,0.9169353,1.0790808,0.68377316,0.024732089,0.63489956,0.7332373,0.056596,0.030192725,19.311728,1.652292,17.66415,22.616312,4.9521623,15.213338,5.635888,9.277443,26.485115,17.20767,53.224037,4.4506125,48.787537,62.12526,13.337726,1150.4989,421.13474,842.94977,1992.7684,1149.8187,4.6801705,1.5784127,3.0176554,7.8369956,4.81934,0.7007266,0.0006064574,0.699952,0.7019395,0.0019875455,20.335917,2.7165065,17.47228,25.76893,8.29665,0.50979245,0.08999354,0.41942027,0.6897795,0.27035922,0.45643067,0.23731433,0.2170771,0.93105936,0.7139822,0.46838817,0.22531424,0.23110732,0.91901666,0.6879093,4.444121,1.1283906,3.2553926,6.700902,3.4455092,9.810173,0.008490403,9.799328,9.827154,0.027825639,6.698648,1.6592637,5.0292373,10.017176,4.987938,0.45643067,0.23731433,0.2170771,0.93105936,0.7139822,2.3419409,1.1265712,1.1555367,4.595083,3.4395466,4.444121,1.1283906,3.2553926,6.700902,3.4455092,0.0,0.0,0.0,0.0,0.0,1.8698415,0.22652715,1.6310114,2.3228958,0.6918844,3.9362433,1.300517,2.9506748,6.537277,3.5866024,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.61237246,0.5386087,0.50609934,0.50110865,0.5000976,58.75,30.0,13.25,72.0,42.0,72.0,53.75,30.0,10.625,75.0,45.0,45.0,141.9395,82.53,36.5505,178.49,95.96,178.49,2084.4375,2466.25,827.34375,2896.0,429.75,2506.0,6.75,9.0,3.125,13.0,4.0,4.0,5.5,1.0,0.5,6.0,5.0,6.0,161.5,33.0,13.5,175.0,142.0,175.0,1.635,0.86,0.335,2.16,1.3,1.3,1.75,1.0,0.375,2.0,1.0,2.0,0.25,1.0,0.375,1.0,0.0,0.0,4.75,8.0,2.75,10.0,2.0,2.0,7.0,14.0,7.0,14.0,0.0,14.0,13.75,12.0,4.25,18.0,6.0,18.0,0.25,1.0,0.375,1.0,0.0,0.0,1.25,5.0,1.875,5.0,0.0,0.0,5.25,8.0,2.75,8.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,6.75,3.0,1.25,8.0,5.0,8.0,21.0875,8.57,2.69875,24.26,15.69,22.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.12727273,0.018181818,0.34545454,0.5090909,0.1688126,0.0606283 
+50,13.061944,1.9567736,11.575911,15.997105,0.044741977,0.9655974,1.0671129,0.056723703,0.026618293,0.016796265,0.076810844,0.09263918,0.6490608,25.0451,6.976775,18.068323,40.399014,22.330688,11.811847,6.084885,7.59798,30.066502,22.468523,65.90287,17.996195,47.90667,101.984184,54.07751,499.7952,182.67255,373.9966,1047.8129,673.8162,3.6772876,1.7164377,2.4101737,8.826601,6.416427,1.183987,0.40977114,0.81195027,2.4133005,1.6013502,25.191055,8.829418,17.247524,51.67931,34.431786,0.47207353,0.08463643,0.40052214,0.59902817,0.19850604,0.5941365,0.021245556,0.5659866,0.615382,0.04939546,0.6546569,0.4484477,0.37426582,2.0,1.6257342,2.4370663,0.44101086,2.0722256,3.7600987,1.6878734,7.410621,2.0887008,5.786206,10.543673,4.757467,7.006618,1.6033876,5.4189014,9.411699,3.9927979,0.5941365,0.021245556,0.5659866,0.615382,0.04939546,1.3093138,0.8968954,0.74853164,4.0,3.2514684,3.2246325,1.0050896,2.2392826,6.239901,4.0006185,0.0,0.0,0.0,0.0,0.0,1.713317,0.48255304,1.230764,2.8266008,1.5958368,3.7537692,0.17343816,3.5142317,4.0139265,0.4996946,0.25302488,0.17332505,0.14465374,0.773,0.62834626,0.0,0.0,0.0,0.0,0.0,3.0,0.62360954,0.5503212,0.5128975,0.50411046,0.5008612,47.333332,58.0,16.444445,72.0,14.0,42.0,53.0,33.0,8.333333,78.0,45.0,50.0,112.157585,150.4045,44.22161,178.49,28.0855,95.96,2564.5,1209.0,331.5,2896.0,1687.0,2896.0,6.6666665,10.0,2.4444444,14.0,4.0,6.0,5.0,3.0,0.6666667,6.0,3.0,5.0,153.83333,64.0,14.277778,175.0,111.0,154.0,1.83,0.86,0.35333332,2.16,1.3,2.16,1.5,1.0,0.5,2.0,1.0,1.0,0.33333334,2.0,0.5555556,2.0,0.0,0.0,3.1666667,5.0,1.8333334,5.0,0.0,5.0,4.6666665,14.0,6.2222223,14.0,0.0,0.0,9.666667,14.0,5.5555553,18.0,4.0,6.0,0.5,1.0,0.5,1.0,0.0,1.0,0.6666667,4.0,1.1111112,4.0,0.0,0.0,5.1666665,8.0,1.8888888,8.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,6.3333335,4.0,1.1111112,8.0,4.0,6.0,18.651667,6.51,2.9616666,22.2,15.69,15.69,0.12883334,0.773,0.21472222,0.773,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.15517241,0.03448276,0.3275862,0.4827586,0.1688126,0.06862707 
+51,11.738855,0.04166491,11.695588,11.847637,0.003343147,0.98974156,1.0063654,0.029632868,0.005898745,0.023892438,0.047369633,0.0102263205,0.6072909,19.263588,11.317425,5.656428,53.79687,48.140446,8.312749,4.479651,3.3690658,19.278124,15.909058,48.725983,28.964745,13.5311165,139.36884,125.837715,717.6528,385.27426,287.91156,1640.0803,1352.1687,1.27914,0.75527537,0.36375433,3.5793812,3.215627,1.0345927,0.5767408,0.3751882,2.5864582,2.2112699,10.344726,6.32625,2.4663746,31.311977,28.845604,0.21252875,0.15989359,0.0018187716,0.8521031,0.85028434,0.24709682,0.18822579,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.5262368,0.93341964,0.36375433,4.5793815,4.215627,3.49505,2.6262376,0.026697872,14.0,13.973302,4.5656495,2.931023,0.7694463,15.172916,14.40347,0.24709682,0.18822579,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,2.7113051,1.5307292,0.938924,6.9661455,6.027221,0.0,0.0,0.0,0.0,0.0,2.8541317,1.5524683,1.1236576,6.7593746,5.635717,1.8794067,1.29737,0.12549524,7.0548863,6.9293914,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.77055174,0.7523077,0.75003856,0.7500008,0.75,23.25,60.0,16.875,72.0,12.0,12.0,62.875,23.0,7.6875,68.0,45.0,68.0,52.535,154.185,42.345,178.49,24.305,24.305,1367.5,1973.0,666.75,2896.0,923.0,923.0,2.75,4.0,1.125,6.0,2.0,2.0,3.625,3.0,0.9375,6.0,3.0,3.0,146.875,34.0,8.8125,175.0,141.0,141.0,1.415,0.86,0.18625,2.16,1.3,1.31,1.875,1.0,0.21875,2.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.875,5.0,1.3125,5.0,0.0,0.0,1.75,14.0,3.0625,14.0,0.0,0.0,4.5,16.0,3.75,18.0,2.0,2.0,0.125,1.0,0.21875,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.625,8.0,2.4375,8.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.75,8.0,2.625,8.0,0.0,0.0,21.90375,7.2,1.5534375,22.89,15.69,22.89,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.41666666,0.0,0.19444445,0.3888889,0.1688126,0.03626477 +52,8.566763,1.0236824,6.007557,9.206564,0.03624929,0.9093768,1.0226558,0.13929261,0.021519905,0.12584266,0.19309238,0.03137121,0.32625288,11.023897,6.5244765,6.9460993,27.335089,20.388988,1.8373162,1.0874127,1.1576833,4.555848,3.398165,30.32674,17.948835,19.108719,75.19883,56.09011,143.31067,84.81819,90.29929,355.35614,265.05685,0.73492646,0.4349651,0.46307328,1.8223393,1.3592659,0.36746323,0.21748255,0.23153664,0.91116965,0.67963296,7.716728,4.5671334,4.8622694,19.134562,14.272292,0.31601837,0.187035,0.19912152,0.7836059,0.58448434,0.36746323,0.21748255,0.23153664,0.91116965,0.67963296,0.0,0.0,0.0,0.0,0.0,1.1023897,0.65244764,0.69460994,2.7335088,2.038899,5.1444855,3.0447557,3.241513,12.756374,9.514862,4.409559,2.6097906,2.7784398,10.934035,8.155596,0.36746323,0.21748255,0.23153664,0.91116965,0.67963296,0.0,0.0,0.0,0.0,0.0,1.1023897,0.65244764,0.69460994,2.7335088,2.038899,0.0,0.0,0.0,0.0,0.0,0.73492646,0.4349651,0.46307328,1.8223393,1.3592659,2.3921857,1.4158114,1.5073036,5.931714,4.424411,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.82462114,0.80414516,0.8001562,0.800007,0.8000001,66.0,30.0,9.6,72.0,42.0,72.0,46.0,5.0,1.6,50.0,45.0,45.0,161.984,82.53,26.4096,178.49,95.96,178.49,2584.0,390.0,124.8,2896.0,2506.0,2506.0,4.4,2.0,0.64,6.0,4.0,4.0,5.8,1.0,0.32,6.0,5.0,6.0,170.8,21.0,6.72,175.0,154.0,175.0,1.472,0.86,0.2752,2.16,1.3,1.3,1.8,1.0,0.32,2.0,1.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,2.6,3.0,0.96,5.0,2.0,2.0,11.2,14.0,4.48,14.0,0.0,14.0,15.6,12.0,3.84,18.0,6.0,18.0,0.2,1.0,0.32,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.4,3.0,0.96,8.0,5.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,7.6,2.0,0.64,8.0,6.0,8.0,20.898,6.51,2.0832,22.2,15.69,22.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.115384616,0.0,0.16666667,0.71794873,0.1688126,0.054020032 diff --git 
a/pysipfenn/tests/testCaseFiles/AdjusterTestDescriptors.npy b/pysipfenn/tests/testCaseFiles/AdjusterTestDescriptors.npy new file mode 100644 index 0000000..12fd89a Binary files /dev/null and b/pysipfenn/tests/testCaseFiles/AdjusterTestDescriptors.npy differ diff --git a/pysipfenn/tests/testCaseFiles/AdjusterTestTargets.csv b/pysipfenn/tests/testCaseFiles/AdjusterTestTargets.csv new file mode 100644 index 0000000..e7fedf9 --- /dev/null +++ b/pysipfenn/tests/testCaseFiles/AdjusterTestTargets.csv @@ -0,0 +1,53 @@ +Name, E_f [eV/atom] +1,0.11204700625000008 +2,4.0003046025 +3,-0.2837863701674114 +4,-0.3390229200000003 +5,-0.17997961958333317 +6,-0.0791259005555555 +7,0.004546606250000214 +8,-0.3278120625000002 +9,4.4154639174999994 +10,-0.9682736355555556 +11,-0.16495270666666784 +12,3.4404967875000008 +13,0.989534965 +14,-0.17180871000000053 +15,0.24268204250000025 +16,3.2851498774999985 +17,0.4826709725520833 +18,0.8575663433333327 +19,-0.15532792000000128 +20,3.5336190875 +21,3.4123461724999995 +22,-0.08306756583333315 +23,3.6542583874999996 +24,-0.26425716500000157 +25,0.0352760779999997 +26,-0.19337707321428713 +27,-0.6388277630555544 +28,-0.5461846583333335 +29,-0.7075618358333339 +30,-0.5300890800000015 +31,0.17000970687500017 +32,0.035313649999999086 +33,3.5496084325000012 +34,-0.11673426059523706 +35,-0.21550866821428574 +36,-0.14759064909598255 +37,-0.12148937464285682 +38,-0.22380490500000047 +39,-0.1422538353571444 +40,0.07023631500000072 +41,0.18032264750000015 +42,-0.36177632333333304 +43,-0.131107012857143 +44,-0.2944636875000004 +45,-0.17348738892857188 +46,-0.3059225313095238 +47,-0.6188369750000007 +48,1.1871788166666672 +49,-0.1811493189285715 +50,3.1963209299999997 +51,0.005689999999998936 +52,-0.6340091025000003 diff --git a/pysipfenn/tests/testCaseFiles/AdjusterTestTargets.npy b/pysipfenn/tests/testCaseFiles/AdjusterTestTargets.npy new file mode 100644 index 0000000..ef2a932 Binary files /dev/null and b/pysipfenn/tests/testCaseFiles/AdjusterTestTargets.npy differ diff --git a/pysipfenn/tests/test_ModelAdjusters.py b/pysipfenn/tests/test_ModelAdjusters.py new file mode 100644 index 0000000..3d7f4ed --- /dev/null +++ b/pysipfenn/tests/test_ModelAdjusters.py @@ -0,0 +1,194 @@ +import unittest +import pytest +import os +import pysipfenn +import torch +from importlib import resources + +# Skip the tests if we're in GitHub Actions and the models haven't been fetched yet +IN_GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") == "true" and os.getenv("MODELS_FETCHED") != "true" + +@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="Test depends on the ONNX network files") +class TestModelAdjusters(unittest.TestCase): + """ + Test all model adjusting features that can operate on the Calculator object. Note that this will require + the models to be downloaded and the environment variable MODELS_FETCHED to be set to true if running in GitHub + Actions. + + The setup will load the Krajewski2022_NN30 model and create an ``OPTIMADEAdjuster`` object for testing that is by + default connected to the Materials Project ``OPTIMADE`` server and looks for their GGA+U formation energies. In the + ``testFullRoutine`` test, the adjuster will be used to adjust the model to the Hf-Mo metallic system. The test will + cover almost all adjuster functionalities in different ways to hit all anticipated code paths. It also tests the + ``LocalAdjuster`` class for loading data from CSV and NPY files, which is a parent class of the ``OPTIMADEAdjuster``. 
+ """ + + def setUp(self): + """ + Initialises the Calculator and ModelAdjuster objects for testing. + """ + self.c = pysipfenn.Calculator(autoLoad=False) + self.assertIsNotNone(self.c) + self.c.loadModels("SIPFENN_Krajewski2022_NN30") + self.assertIn('SIPFENN_Krajewski2022_NN30', self.c.loadedModels) + + self.ma = pysipfenn.OPTIMADEAdjuster(self.c, "SIPFENN_Krajewski2022_NN30") + + def testInit(self): + """ + Test that the OPTIMADEAdjuster object has been initialized correctly. + """ + self.assertEqual(self.ma.modelName, "SIPFENN_Krajewski2022_NN30") + self.assertIsInstance(self.ma.model, torch.nn.Module) + self.assertIsInstance(self.ma.calculator, pysipfenn.Calculator) + + self.assertEqual(len(self.ma.comps), 0) + self.assertEqual(len(self.ma.names), 0) + self.assertEqual(len(self.ma.validationLabels), 0) + + def testPlotExceptions(self): + """ + Test that the plot does not plot anything when no data is present. + """ + self.assertRaises(AssertionError, self.ma.plotStarting) + self.assertRaises(AssertionError, self.ma.plotAdjusted) + + def testFullRoutine(self): + """ + Test the full routine of the adjuster based on the default values pointing to Materials Project. Get the data + using OPTIMADE to adjust the model to Hf-Mo metallic system. Matrix search is reduced to 4 cases to speed up + the test and it is designed to explore all code paths in the search. The test will also check the highlighting + and plotting functionalities of the adjuster. + """ + self.ma.fetchAndFeturize( + 'elements HAS "Hf" AND elements HAS "Mo" AND NOT elements HAS ANY "O","C","F","Cl","S"', + parallelWorkers=4) + + self.ma.calculator.writeDescriptorsToCSV("KS2022", "AdjusterTestDescriptors.csv") + self.ma.calculator.writeDescriptorsToNPY("KS2022", "AdjusterTestDescriptors.npy") + + # Check highlighting and no-last-validation plotting + self.ma.highlightPoints([32, 23, 21, 22]) + self.ma.plotStarting() + + # Hyperparameter search. The 1e-8 is on purpose, so that the model does not converge and always improves after + # the first epoch. + self.ma.matrixHyperParameterSearch( + learningRates=[1e-8, 1e-3], + optimizers= ["Adam"], + weightDecays=[1e-4, 1e-5], + epochs=10 + ) + + self.ma.highlightPoints([0, 1, 2, 3]) + self.ma.highlightCompositions(["Hf", "Mo", "HfMo", "Hf50 Mo50", "Hf3Mo"]) + + self.ma.plotStarting() + self.ma.plotAdjusted() + + # Induce duplicates to test if they are handled + self.ma.fetchAndFeturize( + 'elements HAS "Hf" AND elements HAS "Mo" AND NOT elements HAS ANY "O","C","F","Cl","S"', + parallelWorkers=4) + + self.ma.adjust( + validation=0, + learningRate=1e-4, + epochs=10, + optimizer="Adamax", + weightDecay=1e-4, + lossFunction="MSE" + ) + + self.ma.names = [] + self.ma.plotStarting() + self.ma.plotAdjusted() + + def testDataLoading(self): + """ + Test the data loading functionality of the ``LocalAdjuster`` class (note, ``OPTIMADEAdjuster`` extends it). It + will test loading from both CSV and NPY files exported from the Calculator object. Note that CSV files have + names in the first column and headers in the first row, while NPY files are just the data arrays. It tests + implicit loading from the ``Calculator`` object as well. Lastly, it tests the error raising for unsupported + descriptors and data not matching the descriptor dimensions selected (an optional feature). 
+ """ + + with resources.files('pysipfenn').joinpath('tests/testCaseFiles/') as testFileDir: + + # From CSV + self.lma1 = pysipfenn.LocalAdjuster( + self.c, + model="SIPFENN_Krajewski2022_NN30", + descriptorData=str(testFileDir.joinpath("AdjusterTestDescriptors.csv")), + targetData=str(testFileDir.joinpath("AdjusterTestTargets.csv")), + descriptor="KS2022" + ) + assert self.lma1 is not None + assert len(self.lma1.descriptorData) > 0 + assert len(self.lma1.targetData) > 0 + del self.lma1 + + # From NPY + self.lma2 = pysipfenn.LocalAdjuster( + self.c, + model="SIPFENN_Krajewski2022_NN30", + descriptorData=str(testFileDir.joinpath("AdjusterTestDescriptors.npy")), + targetData=str(testFileDir.joinpath("AdjusterTestTargets.npy")), + descriptor="KS2022" + ) + assert self.lma2 is not None + assert len(self.lma2.descriptorData) > 0 + assert len(self.lma2.targetData) > 0 + + self.c.descriptorData = self.lma2.descriptorData + + del self.lma2 + + # Implicit, from the Calculator + self.lma3 = pysipfenn.LocalAdjuster( + self.c, + targetData=str(testFileDir.joinpath("AdjusterTestTargets.csv")), + model="SIPFENN_Krajewski2022_NN30", + descriptor="KS2022", + ) + + # Error raising + with self.assertRaises(AssertionError): + self.lma4 = pysipfenn.LocalAdjuster( + self.c, + targetData=str(testFileDir.joinpath("AdjusterTestTargets.csv")), + model="SIPFENN_Krajewski2022_NN30", + descriptor="Ward2017", + ) + + with self.assertRaises(NotImplementedError): + self.lma5 = pysipfenn.LocalAdjuster( + self.c, + targetData=str(testFileDir.joinpath("AdjusterTestTargets.csv")), + model="SIPFENN_Krajewski2022_NN30", + descriptor="SomeCrazyDescriptor", + ) + + def testEndpointOverride(self): + """ + Test the endpoint override functionality of the ``OPTIMADEAdjuster`` class. It will test the override of the + endpoint and the data fetching from the new endpoint. + """ + endpoint = ["https://alexandria.icams.rub.de/pbesol"] + targetPath = ['attributes', '_alexandria_formation_energy_per_atom'] + + self.ma2 = pysipfenn.OPTIMADEAdjuster( + self.c, + model="SIPFENN_Krajewski2022_NN30", + endpointOverride=endpoint, + targetPath=targetPath) + + self.ma2.fetchAndFeturize( + 'elements HAS "Hf" AND elements HAS "Mo" AND elements HAS "Zr"', + parallelWorkers=2) + + self.assertGreaterEqual(len(self.ma2.comps), 0, "No compositions were found, thus no data was fetched.") + self.assertGreaterEqual(len(self.ma2.names), 0, "No names were found, thus no data was fetched.") + self.assertGreaterEqual( + len(self.ma2.descriptorData), 0, + "No descriptor data was found. If the other asserts passed, this is likely a bug in the featurization " + "or structural data has been made incompatible or otherwise corrupted.") \ No newline at end of file