Skip to content

Commit

Permalink
Merge branch 'main' into james/logging
Browse files Browse the repository at this point in the history
  • Loading branch information
WillNilges authored Apr 7, 2024
2 parents 5627caa + 47a9e22 commit 92991e4
Show file tree
Hide file tree
Showing 14 changed files with 160 additions and 61 deletions.
1 change: 1 addition & 0 deletions .flake8
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@
max-line-length = 120
extend-ignore = E203
per-file-ignores = **/__init__.py:F401,F403,src/meshapi_hooks/models.py:F401,F403
exclude = src/meshdb/utils/spreadsheet_import
34 changes: 7 additions & 27 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -250,31 +250,12 @@ We have a Celery job that runs hourly in production to back up to an S3 bucket.

To restore from a backup in production:

1. Delete the (presumably corrupted) database

```
docker compose down -v
```

2. Stand up a new database. This will also start meshdb, which applies the
migrations for you on startup.

```
docker compose up -d
```

3. Get a shell in the meshdb container
```
docker exec -it meshdb-meshdb-1 bash
```

4. Restart the postgres container, dropping the volume
1. Get a shell in the meshdb container
```
docker compose down -v postgres
docker compose up -d postgres
$ docker exec -it meshdb-meshdb-1 bash
```

5. Find the backup you want to restore
2. Find the backup you want to restore
```
root@eefdc57a46c2:/opt/meshdb# python manage.py listbackups
Name Datetime
Expand All @@ -287,15 +268,14 @@ default-12db99e5ec1d-2024-03-31-142422.psql.bin 03/31/24 14:24:22
default-bd0acc253775-2024-03-31-163520.psql.bin 03/31/24 16:35:20
```

6. Restore the backup
3. In a separate terminal, drop the old database
```
root@eefdc57a46c2:/opt/meshdb# python manage.py dbrestore -i default-bd0acc253775-2024-03-31-163520.psql.bin
$ echo 'drop database meshdb; create database meshdb;' | docker exec -i meshdb-postgres-1 psql -U meshdb -d postgres
```

Say yes

4. Restore the backup
```
Are you sure you want to continue? [Y/n] y
root@eefdc57a46c2:/opt/meshdb# python manage.py dbrestore -i default-bd0acc253775-2024-03-31-163520.psql.bin
```

**The Quick 'n Dirty Way**
Expand Down
5 changes: 5 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,11 @@ disallow_untyped_calls = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
warn_unused_configs = true
exclude = [
"^src/meshdb/utils/spreadsheet_import/.*",
"^src/meshapi/tests/.*",
"^src/meshapi_hooks/tests/.*",
]


[tool.black]
Expand Down
1 change: 0 additions & 1 deletion src/meshapi/management/commands/create_groups.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ def handle(self, *args, **options):
"link",
"sector",
]
groups = ["admin", "installer", "readonly"]
all_permissions = Permission.objects.all()

admin, _ = Group.objects.get_or_create(name="Admin")
Expand Down
1 change: 0 additions & 1 deletion src/meshapi/models/member.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from typing import List

from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.db.models.fields import EmailField
from django_jsonform.models.fields import ArrayField as JSONFormArrayField
Expand Down
26 changes: 26 additions & 0 deletions src/meshapi/static/widgets/panorama_viewer.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
.main-carousel {
width:100%;
min-height:fit-content;
max-height:300px;
}
.carousel-cell {
width: 100%; /* full width */
height: 200px;
/* center images in cells with flexbox */
display: flex;
align-items: center;
justify-content: center;
margin-right: 10px;
}

.carousel-cell img {
display: block;
}

.no-panos {
display:flex;
align-items: center;
justify-content: center;
border-radius: 5px;
}

20 changes: 16 additions & 4 deletions src/meshapi/templates/widgets/panorama_viewer.html
Original file line number Diff line number Diff line change
@@ -1,5 +1,17 @@
<div style='display:flex; flex-direction: row; width:100%; overflow-x:auto; max-height:300px; padding-left: 10px;'>
{% for panorama in widget.value %}
<img src="{{ panorama }}" height="300px"/>
{% endfor %}
<!--https://flickity.metafizzy.co/-->
<!--integrity generated with https://www.srihash.org/-->
<script src="https://unpkg.com/flickity@2/dist/flickity.pkgd.min.js" integrity="sha384-6ma6pGRLjeu6PkX+yOaR0dUGt6OQl9evbv3H2kdLWwIXtUztaQrWUlbRBPVpDH4B" crossorigin="anonymous"></script>
<link rel="stylesheet" href="https://unpkg.com/flickity@2/dist/flickity.min.css">

<div class="main-carousel" data-flickity='{ "wrapAround": true }'>
{% if widget.value|length == 0 %}
<div class="no-panos">
<p>There are no panoramas.</p>
</div>
{% endif %}
{% for panorama in widget.value %}
<div class="carousel-cell">
<a href="{{ panorama }}" target="_blank"><img src="{{ panorama }}"/></a>
</div>
{% endfor %}
</div>
5 changes: 5 additions & 0 deletions src/meshapi/widgets.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,11 @@ def render(self, name, value, attrs=None, renderer=None):
template = super_template + pano_template
return mark_safe(template)

class Media:
css = {
"all": ("widgets/panorama_viewer.css",),
}


class DeviceIPAddressWidget(widgets.TextInput):
template_name = "widgets/ip_address.html"
25 changes: 17 additions & 8 deletions src/meshdb/utils/spreadsheet_import/csv_load.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,7 @@ class SpreadsheetSector:
class DroppedModification:
original_row_ids: List[int]
new_row_id: int
row_status: str
deduplication_value: str
modified_property: str
database_value: str
Expand All @@ -109,12 +110,13 @@ class DroppedModification:

def get_spreadsheet_rows(
form_responses_path: str,
) -> Tuple[List[SpreadsheetRow], Dict[int, str]]:
) -> Tuple[List[SpreadsheetRow], List[SpreadsheetRow], Dict[int, str]]:
with open(form_responses_path, "r") as input_file:
csv_reader = csv.DictReader(input_file)

skipped_rows: Dict[int, str] = {}
nodes: List[SpreadsheetRow] = []
installs: List[SpreadsheetRow] = []
reassigned_nns: List[SpreadsheetRow] = []

for i, row in enumerate(csv_reader):
# Last row is placeholder
Expand All @@ -140,11 +142,16 @@ def get_spreadsheet_rows(
except ValueError:
abandon_date = None

node_status = SpreadsheetStatus(row["Status"].replace("dupe", "Dupe"))

re_assigned_as_nn = False
try:
nn = row["NN"].lower().strip()
re_assigned_as_nn = nn.startswith("x-")
nn = int(nn) if nn is not None and nn != "" and not re_assigned_as_nn else None
re_assigned_as_nn = nn.startswith("x-") or node_status == SpreadsheetStatus.nnAssigned
if re_assigned_as_nn:
nn = node_id
else:
nn = int(nn) if nn is not None and nn != "" else None
except (KeyError, ValueError):
nn = None

Expand All @@ -159,7 +166,7 @@ def get_spreadsheet_rows(
secondEmail=row["2nd profile email"].lower().strip(),
phone=row["Phone"],
roofAccess=row["Rooftop Access"] == "I have Rooftop access",
status=SpreadsheetStatus(row["Status"].replace("dupe", "Dupe")),
status=node_status,
installDate=install_date,
abandonDate=abandon_date,
nodeName=row["nodeName"],
Expand All @@ -181,12 +188,12 @@ def get_spreadsheet_rows(
continue

if re_assigned_as_nn:
skipped_rows[node_id] = "Reassigned as NN for another row"
reassigned_nns.append(node)
continue

nodes.append(node)
installs.append(node)

return nodes, skipped_rows
return installs, reassigned_nns, skipped_rows


def print_failure_report(skipped_rows: Dict[int, str], original_input_file: str, fname_overide: str = None) -> None:
Expand Down Expand Up @@ -234,6 +241,7 @@ def print_dropped_edit_report(
[
"OriginalRowID(s)",
"DroppedRowID",
"DroppedRowSpreadsheetStatus",
"DeduplicationValue",
"ModifiedProperty",
"DatabaseValue",
Expand All @@ -249,6 +257,7 @@ def print_dropped_edit_report(
new_fields = {}
new_fields["OriginalRowID(s)"] = ", ".join(str(row_id) for row_id in edit.original_row_ids)
new_fields["DroppedRowID"] = edit.new_row_id
new_fields["DroppedRowSpreadsheetStatus"] = edit.row_status
new_fields["DeduplicationValue"] = edit.deduplication_value
new_fields["ModifiedProperty"] = edit.modified_property
new_fields["DatabaseValue"] = edit.database_value
Expand Down
77 changes: 64 additions & 13 deletions src/meshdb/utils/spreadsheet_import/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import sys
import time
from collections import defaultdict
from typing import List
from typing import Dict, List

import django

Expand All @@ -16,6 +16,7 @@
django.setup()

from meshapi import models
from meshdb.utils.spreadsheet_import.building.constants import INVALID_BIN_NUMBERS
from meshdb.utils.spreadsheet_import.building.resolve_address import AddressParser
from meshdb.utils.spreadsheet_import.csv_load import (
DroppedModification,
Expand All @@ -29,7 +30,7 @@
from meshdb.utils.spreadsheet_import.parse_install import create_install, normalize_install_to_primary_building_node
from meshdb.utils.spreadsheet_import.parse_link import load_links_supplement_with_uisp
from meshdb.utils.spreadsheet_import.parse_member import get_or_create_member
from meshdb.utils.spreadsheet_import.parse_node import get_or_create_node, normalize_building_node_links
from meshdb.utils.spreadsheet_import.parse_node import get_node_type, get_or_create_node, normalize_building_node_links


def main():
Expand All @@ -51,20 +52,42 @@ def main():

form_responses_path, links_path, sectors_path = sys.argv[1:4]

rows, skipped = get_spreadsheet_rows(form_responses_path)
rows, reassigned_rows, skipped = get_spreadsheet_rows(form_responses_path)
logging.info(f'Loaded {len(rows)} rows from "{form_responses_path}"')

member_duplicate_counts = defaultdict(lambda: 1)

addr_parser = AddressParser()

dropped_modifications: List[DroppedModification] = []

max_install_num = max(row.id for row in rows)

start_time = time.time()
logging.info(f"Processing install # {rows[0].id}/{max_install_num}...")
try:
logging.info(f"Creating {len(reassigned_rows)} nodes for rows marked 'NN Reassigned'...")

nn_bin_map: Dict[int, int] = {}
for row in reassigned_rows:
node = models.Node(
network_number=row.id,
name=row.nodeName if row.nodeName else None,
latitude=row.latitude,
longitude=row.longitude,
altitude=row.altitude,
status=models.Node.NodeStatus.PLANNED, # This will get overridden later
type=get_node_type(row.notes) if row.notes else models.Node.NodeType.STANDARD,
notes=f"Spreadsheet Notes:\n"
f"{row.notes if row.notes else None}\n\n"
f"Spreadsheet Notes2:\n"
f"{row.notes2 if row.notes2 else None}\n\n",
)
node.save()
dob_bin = row.bin if row.bin and row.bin > 0 and row.bin not in INVALID_BIN_NUMBERS else None
if dob_bin:
nn_bin_map[node.network_number] = dob_bin

member_duplicate_counts = defaultdict(lambda: 1)

addr_parser = AddressParser()

dropped_modifications: List[DroppedModification] = []

max_install_num = max(row.id for row in rows)

start_time = time.time()
logging.info(f"Processing install # {rows[0].id}/{max_install_num}...")
for i, row in enumerate(rows):
if (i + 2) % 100 == 0:
logging.info(
Expand Down Expand Up @@ -146,6 +169,34 @@ def main():
for install in models.Install.objects.all():
normalize_install_to_primary_building_node(install)

# Confirm that the appropriate NN -> Building relations have been formed via the Install
# import that we would expect from the NN only rows
for nn, _bin in nn_bin_map.items():
node = models.Node.objects.get(network_number=nn)
building_match = node.buildings.filter(bin=_bin)
if not building_match:
logging.warning(
f"Warning, from NN data, expected NN{nn} to be connected to at least one building "
f"with DOB number {_bin} but no such connection was found. Adding it now..."
)
building_candidates = models.Building.objects.filter(bin=_bin)
if len(building_candidates) == 0:
logging.error(
f"Found no buildings with DOB BIN {_bin}, but this BIN is specified in "
f"spreadsheet row #{nn}. Is this BIN correct?"
)
continue
for building in building_candidates:
node.buildings.add(building)

for node in models.Node.objects.all():
if not node.installs.all():
# If we don't have any installs associated with this node, it is not
# active or planned, mark it as INACTIVE
logging.warning(f"Found node imported without installs (NN{node.network_number}), marking INACTIVE")
node.status = models.Node.NodeStatus.INACTIVE
node.save()

# Create an AP device for each access point install
load_access_points(rows)

Expand Down
Loading

0 comments on commit 92991e4

Please sign in to comment.