diff --git a/EMAILS.md b/EMAILS.md
new file mode 100644
index 00000000..aad38446
--- /dev/null
+++ b/EMAILS.md
@@ -0,0 +1,60 @@
+# Mass Emails
+
+## MIT Systems
+
+We're using Mailman -- in particular, it handles unsubscribing for us.
+
+### Lists we maintain
+
+We currently maintain the following lists on Mailman:
+- `battlecode-interest-mm` is the general interest mailing list, linked to by the website homepage. This should be used for announcing new Battlecode contests.
+- `battlecode-competitors-2020-mm` is an empty mailing list created by accident. It can be used for testing.
+- `battlecode-competitors-2021-mm` contains all registered competitors of Battlecode 2021.
+
+### Creating a list
+
+1. Create a list at https://listmaker.mit.edu/lc/ and navigate to the admin page. You will receive an email containing the admin password.
+1. Update the administrator to the Battlecode contact email (this is the publicly viewable list owner), and set yourself and others to be moderators.
+   After doing this, you will need to re-authenticate (you're no longer the admin), using the password that was emailed to you.
+1. Set list parameters. Below are some examples.
+   - Description: MIT Battlecode 2021
+   - Subject prefix: [battlecode-21]
+   - Hide sender = Yes. All sent emails will appear to come from the list, not you.
+   - Strip Reply-To header = Yes, explicit Reply-To = the battlecode address. This configures replies to not go back to the list.
+   - In the privacy menu:
+     - Only the list admin should be able to view the subscription list.
+     - In the sender submenu, new members should be moderated. This prevents random subscribers from sending unmoderated mail.
+     - In the recipient submenu, the ceiling should be zero. This allows us to send mail even if the subscription list is huge.
+
+### Adding to a list
+
+Before doing this, turn off welcome messages in the general menu (`send_welcome_msg`). This ensures we don't spam people when they're added.
+
+We use mmblanche for this. It's easiest to use it installed on Athena. SSH in, then run:
+```
+bash # if your default shell is not bash
+add consult
+mmblanche
+```
+to be able to use it. (`add consult` only works when running through bash, for some reason.)
+
+Get a list of emails, and convert it to the format specified by mmblanche's `-f` argument: one email per line; a sketch of this conversion is at the end of this section. (For example, if you're working with a Google Form, you can get a Google Sheet of responses. Create a new tab -- **not** the tab where form responses are being collected! Reduce it to a single column of values such that all the values are emails -- strip out timestamps, the header row, and the like. Then download this as a csv. If you'd like to export emails from our database, you can use a nice interface to download a table as a csv, and work with it.)
+
+Move this file to your Athena locker; you can use `scp` for this (example [here](https://unix.stackexchange.com/questions/106480/how-to-copy-files-from-one-machine-to-another-using-ssh)).
+
+Finally, run mmblanche! For example, `mmblanche [mailman-list-name] -al [path/to/file]`. Likely you'll want to use `-al`, as it simply adds any emails in the file to the list, skips over duplicates for you, and doesn't delete anything otherwise.
+
+You'll be prompted for the list admin password (the one emailed when the list was created; see "Creating a list" above). You may want to use the `-S` setting too, which will save admin passwords so you don't have to keep typing them.
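+Here's a minimal sketch of that csv-to-list conversion in Python, if scripting it is easier than cleaning the sheet by hand. (The filenames are hypothetical; it simply keeps anything that looks like an email.)
+
+```
+# Read a csv of form responses and write one email per line, as mmblanche's
+# -f argument expects. Filenames are placeholders.
+import csv
+
+emails = []
+with open("responses.csv", newline="") as f:
+    for row in csv.reader(f):
+        for cell in row:
+            cell = cell.strip()
+            # Keep cells that look like emails; this skips timestamps,
+            # the header row, and duplicates.
+            if "@" in cell and cell not in emails:
+                emails.append(cell)
+
+with open("emails.txt", "w") as f:
+    f.write("\n".join(emails) + "\n")
+```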
+### Sending to a list
+
+If the set of emails you're trying to reach is continually growing (e.g. if you're messaging a large interest list through a gsheet that keeps growing), then you'll probably want to update the lists first. Follow the instructions in the above section.
+
+Send mail to the list as you would normally.
+- Ensure that you (yes, you specifically, not the Battlecode address) are a member of the list. Mark yourself as not-moderated in the Members list, or make sure you have a mod ready to release your email.
+- Do not use Bcc. Bcc'd mail will be blocked by the list.
+- Include trailing newlines. Mail without trailing newlines may show the message footer on the same line as your email signature.
+
+## Sendgrid
+
+(TODO: dump some conversations that I had with Nate in Slack, as well as some messages that he sent in some channel.)
diff --git a/backend/api/migrations/0019_scrimmage_winscore.py b/backend/api/migrations/0019_scrimmage_winscore.py
new file mode 100644
index 00000000..023331de
--- /dev/null
+++ b/backend/api/migrations/0019_scrimmage_winscore.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.13 on 2020-12-27 02:03
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('api', '0018_auto_20200416_2354'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='scrimmage',
+            name='winscore',
+            field=models.IntegerField(null=True),
+        ),
+    ]
diff --git a/backend/api/migrations/0020_scrimmage_losescore.py b/backend/api/migrations/0020_scrimmage_losescore.py
new file mode 100644
index 00000000..366d2d5b
--- /dev/null
+++ b/backend/api/migrations/0020_scrimmage_losescore.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.13 on 2020-12-27 17:49
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('api', '0019_scrimmage_winscore'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='scrimmage',
+            name='losescore',
+            field=models.IntegerField(null=True),
+        ),
+    ]
diff --git a/backend/api/models.py b/backend/api/models.py
index 817fa499..f268aa9a 100644
--- a/backend/api/models.py
+++ b/backend/api/models.py
@@ -207,6 +207,8 @@ class Scrimmage(models.Model):
     # Match-running (completed by match runner)
     status = models.TextField(choices=SCRIMMAGE_STATUS_CHOICES, default='pending')
+    winscore = models.IntegerField(null=True)
+    losescore = models.IntegerField(null=True)
     replay = models.TextField(blank=True)

     # Metadata
diff --git a/backend/api/pub.py b/backend/api/pub.py
deleted file mode 100644
index 73fb6948..00000000
--- a/backend/api/pub.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -import argparse -import time -from google.cloud import pubsub_v1 - - -def get_callback(api_future, data, ref): - """Wrap message data in the context of the callback function.""" - def callback(api_future): - try: - print("Published message {} now has message ID {}".format( - data, api_future.result())) - ref["num_messages"] += 1 - except Exception: - print("A problem occurred when publishing {}: {}\n".format( - data, api_future.exception())) - raise - return callback - - -def pub(project_id, topic_name, data=""): - """Publishes a message to a Pub/Sub topic.""" - # [START pubsub_quickstart_pub_client] - # Initialize a Publisher client. - client = pubsub_v1.PublisherClient() - # [END pubsub_quickstart_pub_client] - # Create a fully qualified identifier in the form of - # `projects/{project_id}/topics/{topic_name}` - topic_path = client.topic_path(project_id, topic_name) - - # Data sent to Cloud Pub/Sub must be a bytestring. - #data = b"examplefuncs" - if data == "": - data = b"{\"gametype\":\"scrimmage\",\"gameid\":\"1\",\"player1\":\"examplefuncs\",\"player2\":\"examplefuncs\",\"maps\":\"maptestsmall\",\"replay\":\"abcdefg\"}" - - # Keep track of the number of published messages. - ref = dict({"num_messages": 0}) - - # When you publish a message, the client returns a future. - api_future = client.publish(topic_path, data=data) - api_future.add_done_callback(get_callback(api_future, data, ref)) - - # Keep the main thread from exiting while the message future - # gets resolved in the background. - while api_future.running(): - time.sleep(0.5) - print("Published {} message(s).".format(ref["num_messages"])) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter - ) - parser.add_argument('project_id', help='Google Cloud project ID') - parser.add_argument('topic_name', help='Pub/Sub topic name') - - args = parser.parse_args() - - pub(args.project_id, args.topic_name) -# [END pubsub_quickstart_pub_all] diff --git a/backend/api/serializers.py b/backend/api/serializers.py index ad05878b..4f0b9325 100644 --- a/backend/api/serializers.py +++ b/backend/api/serializers.py @@ -155,7 +155,7 @@ class ScrimmageSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Scrimmage fields = ('url', 'id', 'league', 'red_team', 'red_mu', 'blue_team', 'blue_mu', 'ranked', - 'status', 'replay', 'requested_by', 'requested_at', 'started_at', 'updated_at', 'tournament_id') + 'status', 'winscore', 'losescore', 'replay', 'requested_by', 'requested_at', 'started_at', 'updated_at', 'tournament_id') read_only_fields = ('url', 'requested_at', 'started_at', 'updated_at') diff --git a/backend/api/views.py b/backend/api/views.py index a9768667..d40aa0d3 100644 --- a/backend/api/views.py +++ b/backend/api/views.py @@ -28,21 +28,12 @@ SUBMISSION_FILENAME = lambda submission_id: f"{submission_id}/source.zip" RESUME_FILENAME = lambda user_id: f"{user_id}/resume.pdf" -# pub sub commands (from pub.py) -def get_callback(api_future, data, ref): - """Wrap message data in the context of the callback function.""" - def callback(api_future): - try: - print("Published message {} now has message ID {}".format( - data, api_future.result())) - ref["num_messages"] += 1 - except Exception: - print("A problem occurred when publishing {}: {}\n".format( - data, api_future.exception())) - raise - return callback - -def pub(project_id, topic_name, data="", num_retries=5): +# NOTE: throughout our codebase, we sometimes refer to a pubsub as a 
"queue", adding a message to a pubsub as "queueing" something, etc. Technically this is not true: the pubsub gives no guarantee at all of a true queue or FIFO order. However, this detail of pubsub order is generally nonconsequential, and when it does matter, we have workarounds for non-FIFO-order cases. + +# Methods for publishing a message to a pubsub. +# Note that data must be a bytestring. +# Adapted from https://github.com/googleapis/python-pubsub/blob/master/samples/snippets/quickstart/pub.py +def pub(project_id, topic_name, data, num_retries=5): """Publishes a message to a Pub/Sub topic.""" # Repeat while this fails, because the data is already in the @@ -63,23 +54,9 @@ def pub(project_id, topic_name, data="", num_retries=5): # `projects/{project_id}/topics/{topic_name}` topic_path = client.topic_path(project_id, topic_name) - # Data sent to Cloud Pub/Sub must be a bytestring. - #data = b"examplefuncs" - if data == "": - data = b"sample pub/sub message" - - # Keep track of the number of published messages. - ref = dict({"num_messages": 0}) - # When you publish a message, the client returns a future. - api_future = client.publish(topic_path, data=data) - api_future.add_done_callback(get_callback(api_future, data, ref)) - - # Keep the main thread from exiting while the message future - # gets resolved in the background. - while api_future.running(): - time.sleep(0.5) - # print("Published {} message(s).".format(ref["num_messages"])) + api_future = client.publish(topic_path, data) + message_id = api_future.result() except: pass else: @@ -140,7 +117,11 @@ def signed_upload_url(file_path, bucket): """ blob = GCloudUploadDownload.get_blob(file_path, bucket) - return blob.create_resumable_upload_session() + # Origin is necessary to prevent CORS errors later: + # https://stackoverflow.com/questions/25688608/xmlhttprequest-cors-to-google-cloud-storage-only-working-in-preflight-request + # https://stackoverflow.com/questions/46971451/cors-request-made-despite-error-in-console + # https://googleapis.dev/python/storage/latest/blobs.html + return blob.create_resumable_upload_session(origin=settings.THIS_URL) @staticmethod def signed_download_url(file_path, bucket): @@ -659,6 +640,7 @@ def create(self, request, team, league_id): if not serializer.is_valid(): return Response(serializer.errors, status.HTTP_400_BAD_REQUEST) + # Note that IDs are needed to generate the link. serializer.save() #save it once, link will be undefined since we don't have any way to know id serializer.save() #save again, link automatically set @@ -677,15 +659,6 @@ def create(self, request, team, league_id): upload_url = GCloudUploadDownload.signed_upload_url(SUBMISSION_FILENAME(serializer.data['id']), GCLOUD_SUB_BUCKET) - # The submission process is problematic: if the IDs are recorded, before the code is actually uploaded, then code that fails to upload will have dead IDs associated with it, and the team will be sad - # Also, if user navigates away before the upload_url is returned, - # then no code makes it into the bucket - # This is fixed(?) 
-        # or by uploading the file and then pressing another button to officialy submit
-        # The best way for now would be to have the upload, when done,
-        # call a function in the backend that adjusts sub IDs
-        # TODO somehow fix this problem
-
         return Response({'upload_url': upload_url, 'submission_id': submission.id}, status.HTTP_201_CREATED)

@@ -708,6 +681,42 @@ def retrieve_file(self, request, team, league_id, pk=None):

         return Response({'download_url': download_url}, status.HTTP_200_OK)

+    @action(methods=['patch', 'post'], detail=True)
+    def compilation_pubsub_call(self, request, team, league_id, pk=None):
+        # It is better if the compile server gets requests for compiling submissions that are actually in buckets.
+        # So, only after an upload is done, the frontend calls this endpoint to give the compile server a request.
+
+        # Only allow if superuser, or on the team of the submission.
+        # Also make sure that the admin is on a team! Otherwise you may get a 403.
+        submission = self.get_queryset().get(pk=pk)
+        is_admin = User.objects.all().get(username=request.user).is_superuser
+        if not ((team == submission.team) or is_admin):
+            return Response({'message': 'Not authenticated on the right team, nor is admin'}, status.HTTP_401_UNAUTHORIZED)
+
+        # If a compilation has already succeeded, keep it as such; no need to re-do.
+        # (Might make sense to re-do for other submissions though, for example if messages are accidentally taken off the compilation pubsub queue.)
+        if submission.compilation_status == settings.COMPILE_STATUS.SUCCESS:
+            return Response({'message': 'Success response already received for this submission'}, status.HTTP_400_BAD_REQUEST)
+        # Only allow the admin to re-queue submissions, to prevent submission spam.
+        if (submission.compilation_status != settings.COMPILE_STATUS.PROGRESS) and (not is_admin):
+            return Response({'message': 'Only admin can attempt to re-queue submissions'}, status.HTTP_400_BAD_REQUEST)
+
+        # Indicate that the submission is in a bucket.
+        submission.compilation_status = settings.COMPILE_STATUS.UPLOADED
+        submission.save()
+
+        id = submission.id
+        # Notify compile server through pubsub queue.
+        data = str(id)
+        data_bytestring = data.encode('utf-8')
+        pub(GCLOUD_PROJECT, GCLOUD_SUB_COMPILE_NAME, data_bytestring)
+
+        # Indicate that the submission has been queued.
+        submission.compilation_status = settings.COMPILE_STATUS.QUEUED
+        submission.save()
+
+        return Response({'message': 'Status updated'}, status.HTTP_200_OK)
+
     @action(methods=['patch', 'post'], detail=True)
     def compilation_update(self, request, team, league_id, pk=None):
         is_admin = User.objects.all().get(username=request.user).is_superuser
@@ -734,7 +743,8 @@ def compilation_update(self, request, team, league_id, pk=None):
         # Only if this submission is newer than what's already been processed,
         # update the submission history.
         # (to prevent reverting to older submissions that took longer to process)
-        if submission.id > team_sub.last_1_id:
+        # (The compile server should generally be processing submissions in the same order they were uploaded, anyway.
But this check is still good in case of async conditions, re-queueing, etc.)
+        if team_sub.last_1_id is None or submission.id > team_sub.last_1_id:
             team_sub.last_3_id = team_sub.last_2_id
             team_sub.last_2_id = team_sub.last_1_id
             team_sub.last_1_id = submission
@@ -809,19 +819,6 @@ def team_compilation_status(self, request, team, league_id, pk=None):
         else:
             return Response({'status': None}, status.HTTP_200_OK)

-    @action(methods=['get'], detail=True)
-    def team_compilation_id(self, request, team, league_id, pk=None):
-        if pk != str(team.id):
-            return Response({'message': "Not authenticated"}, status.HTTP_401_UNAUTHORIZED)
-
-        team_data = self.get_queryset().get(pk=pk)
-        comp_id = team_data.compiling_id
-        if comp_id is not None:
-            return Response({'compilation_id': comp_id}, status.HTTP_200_OK)
-        else:
-            # this is bad, replace with something thats actually None
-            return Response({'compilation_id': -1}, status.HTTP_200_OK)
-

 class ScrimmageViewSet(viewsets.GenericViewSet,
                   mixins.ListModelMixin,
@@ -1011,14 +1008,26 @@ def set_outcome(self, request, league_id, team, pk=None):
             return Response({'message': 'Scrimmage does not exist.'}, status.HTTP_404_NOT_FOUND)

         if 'status' in request.data:
-            sc_status = request.data['status']
+            sc_status = request.data['status']
             if sc_status == "redwon" or sc_status == "bluewon":
+
+                if 'winscore' in request.data and 'losescore' in request.data:
+                    sc_winscore = request.data['winscore']
+                    sc_losescore = request.data['losescore']
+                else:
+                    return Response({'message': 'Must include both winscore and losescore in request.'},
+                        status.HTTP_400_BAD_REQUEST)
+
+                if sc_winscore <= sc_losescore:
+                    return Response({'message': 'Scores invalid. Winscore must be more than half of total games.'}, status.HTTP_400_BAD_REQUEST)

                 scrimmage.status = sc_status
+                scrimmage.winscore = sc_winscore
+                scrimmage.losescore = sc_losescore

                 # if tournament, then return here
                 if scrimmage.tournament_id is not None:
                     scrimmage.save()
-                    return Response({'status': sc_status}, status.HTTP_200_OK)
+                    return Response({'status': sc_status, 'winscore': sc_winscore, 'losescore': sc_losescore}, status.HTTP_200_OK)

                 # update rankings using elo
                 # get team info
@@ -1052,12 +1061,13 @@ def set_outcome(self, request, league_id, team, pk=None):
                 lost.save()

                 scrimmage.save()
-                return Response({'status': sc_status}, status.HTTP_200_OK)
+                return Response({'status': sc_status, 'winscore': sc_winscore, 'losescore': sc_losescore}, status.HTTP_200_OK)
             elif sc_status == "error":
                 scrimmage.status = sc_status
                 scrimmage.save()
-                return Response({'status': sc_status}, status.HTTP_200_OK)
+                # Return 200, because the scrimmage runner should be informed that it successfully sent the error status to the backend
+                return Response({'status': sc_status, 'winscore': None, 'losescore': None}, status.HTTP_200_OK)
             else:
                 return Response({'message': 'Set scrimmage to pending/queued/cancelled with accept/reject/cancel api calls'}, status.HTTP_400_BAD_REQUEST)
         else:
diff --git a/backend/backend_script.py b/backend/backend_script.py
index c5d769e4..7d23e415 100644
--- a/backend/backend_script.py
+++ b/backend/backend_script.py
@@ -13,13 +13,20 @@
 response = requests.post(domain + 'auth/token/', data=data)
 token = json.loads(response.text)['access']

+# data = {
+#     'type': 'tour_scrimmage',
+#     'tournament_id': '-1',
+#     'player1': '917',
+#     'player2': '919'
+# }
 data = {
-    'type': 'tour_scrimmage',
-    'tournament_id': '-1',
-    'player1': '917',
-    'player2': '919'
+    'status': 'error',
+    'winscore': None,
+    'losescore': None
 }
{"Authorization": "Bearer " + token} -response = requests.post(domain + 'api/match/enqueue/', data=data, headers=headers) +# response = requests.post(domain + 'api/match/enqueue/', data=data, headers=headers) +response = requests.patch(domain + 'api/0/scrimmage/1/set_outcome/', data=data, headers=headers) + print(response.text) diff --git a/backend/dev_settings.py b/backend/dev_settings.py index 26a32776..197a022f 100644 --- a/backend/dev_settings.py +++ b/backend/dev_settings.py @@ -40,6 +40,7 @@ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True +THIS_URL = 'http://localhost:3000' # Application definition INSTALLED_APPS += ['debug_toolbar'] diff --git a/backend/docs/SETUP.md b/backend/docs/SETUP.md index 726ebe8d..29b705a0 100644 --- a/backend/docs/SETUP.md +++ b/backend/docs/SETUP.md @@ -52,7 +52,7 @@ Once the database is finished being created, connect to it with your Postgres ed Delete the contents of the following tables. (_Don't delete the tables themselves!_ To easily delete info, you can run a query, such as `DELETE FROM [table_name]`.) The tables are: `api_scrimmage`, `api_scrimmage_hidden`, `api_submission`, `api_team`, `api_team_users`, `api_teamsubmission`, `api_tournament`, `api_tournamentscrimmage`, `api_update`, `api_user`, `django_admin_log`. (You may have to delete them in a particular order. Particularly, if you get an error pertaining to a "foreign key constraint", you'll have to delete the table which uses it first. Deleting those tables is probably okay.) -Updating `api_league` is slightly different. Don't delete the entry; just edit it instead. Change `name` to something more suitable (eg `bh20`), change the `start_date` and `end_date` (they don't have to be exact, so feel free to use a longer range than the actual tournament. Set `active` to true. **Set `submissions_enabled` to false and `game_released` to false.** Finally `engine_version` needs to be changed as well; ask the infrastructure team what to change it to. +Updating `api_league` is slightly different. Don't delete the entry; just edit it instead. Change `name` to something more suitable (eg `bh20`), change the `start_date` and `end_date` (they don't have to be exact, so feel free to use a longer range than the actual tournament. **Set `active` to true. Set `submissions_enabled` to true. Set `game_released` to false.** Finally `engine_version` needs to be changed as well; ask the infrastructure team what to change it to. Next, we need to register a superuser account (for use by the infra). Run the battlecode website, and simply follow the normal account registration process. Take note of the password! Also, have this superuser create and join a team (this is necessary for some permissions). @@ -62,7 +62,18 @@ Then stop the old database (on its main page, press "stop"). ## Deployment Setup -Deployment is done through the Google Cloud Platform. You'll need access to the Google Cloud project. (If you don't have access already, ask a dev to add you.) With that, you can start here: +Deployment is done through the Google Cloud Platform. You'll need access to the Google Cloud project. (If you don't have access already, ask a dev to add you.) It's also helpful to install gsutil, a command line application for managing GCP. Link here: https://cloud.google.com/storage/docs/gsutil. + +With that, you can start here -- + +### Configuring Settings + +After registering a domain name for the competition, set `THIS_URL` (in `settings.py`) to that domain. 
+
+### Storage Buckets
+Go to "Storage" on GCP console. A bucket for submissions should have been created (if not, instructions are in the infrastructure readme).
+Set up the CORS policy, which allows us to upload to the bucket from external websites. Find `docs/cors.json`; in there, update the domain URLs listed. Then, run `gsutil cors set path/to/cors.json gs://bc21-submissions` (updating the bucket name to whatever it is this year).
+More info is here: https://cloud.google.com/storage/docs/configuring-cors#gsutil

### Cloud Build Triggers
Go to "Cloud Build" triggers on GCP console, here: https://console.cloud.google.com/cloud-build/triggers?project=battlecode18
@@ -72,11 +83,15 @@ Change Dockerfile directory to `/backend`, and image name to `gcr.io/battlecode1

With this step done: on pushes to master, Google Cloud will create a Docker container with our latest code. Push a commit to master, to test that the trigger works! Under "Cloud Builds" -> "History" you can see the build in progress.

+### Google Application Credentials
+Infrastructure should have made a service account. Get the service account json file from an infra dev. (If they haven't done so yet, you can come back to this section later. Make sure to!)
+Set the contents of this file into dev_settings_sensitive.py, as GOOGLE_APPLICATION_CREDENTIALS. Formatting is a little weird here -- you'll have to wrap the contents of the json file in `r'''` at the beginning, and `'''` at the end. See another version of the file for an example.
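+For instance, here's a sketch of the result (with fake, abbreviated values -- a real service account json has more fields):
+
+```
+# dev_settings_sensitive.py -- the raw triple-quoted string keeps the json,
+# including its \n escape sequences, intact. Values below are placeholders.
+GOOGLE_APPLICATION_CREDENTIALS = r'''{
+  "type": "service_account",
+  "project_id": "battlecode18",
+  "private_key": "-----BEGIN PRIVATE KEY-----\n..."
+}'''
+```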
+
### Instance Template
From Google Cloud console, "Compute Engine" -> "Instance Templates". Click on an old backend template, and then click on "Create similar". Change the name to something descriptive enough and conventional. ("bc21-backend-template", for example, works well. Also I've found that including the current date and time in the name can help keep things straight.) For machine type, we've found the `n1-standard-n1` to be cheap and work well, especially providing enough memory.
-Check the checkbox of "Deploy a container image to this VM instance", and change the container image to the image name you've just written in the cloud build trigger.
-Then, click "Advanced container options" to see a place to set environment variables. In the repo's `backend/settings.py`, you can look at the `os.getenv` calls to see which environment variables are needed. Set these here, to the same values that have been used in local testing / in `dev_settings_sensitive.py`. (Other than `DB_HOST`, these probably don't need changing.) Note that these are un-editable; if you ever change environment variables, you'll have to make a new instance template.
+Check the checkbox of "Deploy a container image to this VM instance", and change the container image to the image name you've just written in the cloud build trigger.
+Then, click "Advanced container options" to see a place to set environment variables. Find the variables set in `dev_settings_sensitive.py`, and set all of those keys/values here, too. (Here, these values should not be enclosed in quotes.) Note that these are un-editable; if you ever change environment variables, you'll have to make a new instance template. See the "Deploying new instance template" section for more info on this.

(For now, keep the boot disk the same; it may be good to change it to a later version down the road. Be sure to test that the VMs still work, though.)

@@ -107,6 +122,13 @@ Finally, click update!

(Note: sometimes, after you try to update changes, they may not go through. This may be due to creating too many backend instances/buckets; we can only have so many up at any given time. You'll see notifications and any errors in the top right corner of the Google Console; you can check if this is the problem. If so, deleting old backend services/buckets is surprisingly hard. You need to first delete any uses of them in the host and path rules, then delete their uses in the "backend services" / "backend buckets" lists on the edit page's backend configuration section; don't forget to save. Then you need to _actually_ delete them, by using the gcloud command line interface. Instructions [here](https://cloud.google.com/sdk/gcloud/reference/compute/backend-services/delete) and [here](https://cloud.google.com/sdk/gcloud/reference/compute/backend-buckets/delete).)

### Some last steps
+Make sure the CORS policy and Google Application credentials are all set up, as described earlier. In particular, make sure that the Google Application credentials have been set up as an environment variable in the instance template, or create a new instance template with this set.
+Delete old instance groups: go to "Compute Engine" -> "Instance groups", check any old instance groups that are no longer in use, and click "delete".
+Delete old instance templates: go to "Compute Engine" -> "Instance templates", check any old templates that are no longer in use, and click "delete".
+Delete old, unused backend services and buckets, if you're up to it (instructions in the previous section). But this can be a pain and is certainly not necessary.
+
+## Deploying new instance template
+Sometimes you'll have to change your instance template (for example, if you change an environment variable). To do so:
+Create a new instance template (if you're looking to make just small changes, "Create Similar" on the original instance template's page is helpful here).
+Click on the instance group currently in use, and on its page, click "Edit Group". Find the "instance template" dropdown and change it to the newly created instance template.
+Finally, click on "rolling restart/replace". Change the operation from `Restart` to `Replace`, let maximum surge be 1 and **maximum unavailable be 0** (we don't want our server to go down). Wait for the spinning icons to become checkmarks.
diff --git a/backend/docs/cors.json b/backend/docs/cors.json
new file mode 100644
index 00000000..de4be662
--- /dev/null
+++ b/backend/docs/cors.json
@@ -0,0 +1,17 @@
+[
+    {
+      "method": [
+        "GET",
+        "PUT"
+      ],
+      "origin": [
+        "http://localhost:3000",
+        "http://2021.battlecode.org",
+        "https://2021.battlecode.org"
+      ],
+      "responseHeader": [
+        "authorization",
+        "content-type"
+      ]
+    }
+]
diff --git a/backend/settings.py b/backend/settings.py
index 4a071fce..53e65e16 100644
--- a/backend/settings.py
+++ b/backend/settings.py
@@ -104,6 +104,8 @@ class COMPILE_STATUS:
     SUCCESS = 1
     FAIL = 2
     ERROR = 3
+    UPLOADED = 4
+    QUEUED = 5

 # Application definition

@@ -272,4 +274,4 @@ class COMPILE_STATUS:

 # google cloud
-GOOGLE_APPLICATION_CREDENTIALS = os.getenv('GOOGLE_APPLICATION_CREDENTIALS_JSON')
+GOOGLE_APPLICATION_CREDENTIALS = os.getenv('GOOGLE_APPLICATION_CREDENTIALS')
diff --git a/frontend/README.md b/frontend/README.md
index 77bbd119..bb91e7ba 100755
--- a/frontend/README.md
+++ b/frontend/README.md
@@ -34,12 +34,22 @@ This automatically reloads the page on changes. To run the same thing without au

When installing a new Node package, always `npm install --save ` or `npm install --save-dev `, and commit `package.json` and `package-lock.json`.
This should work even if we run it from Docker. If you don't have `npm` installed on your computer, you can `docker exec -it battlecode20_frontend_1 sh` and then run the commands above.

+Our local processes (including our dockerfile) use `npm start` and/or `npm run start`. These commands automatically use `.env.development`, and not `.env.production`. See here for more information: https://create-react-app.dev/docs/adding-custom-environment-variables/#what-other-env-files-can-be-used
+
## Deployment

For production, build with `npm run build` for the full thing, and `npm run buildnogame` to build the site without any game-specific information. This is handled automatically by calling `./deploy.sh deploy` or `./deploy.sh deploynogame` using Bash, respectively. Note that the former should ONLY be called after the release of the game, since it makes the game specs and the visualizer public.

+### access.txt
+
+During deployment, you'll need an up-to-date version of `frontend/public/access.txt`. This file is needed by game runners to run matches, and by competitors because it grants them access to downloading the GitHub package containing the engine. It is really difficult to deploy; our solution is to have it deployed with the rest of the frontend code and onto our website, but to never push it to GitHub. Make sure you have an up-to-date copy! If you don't have one, check with the infra devs.
+
+### Assorted notes
+
Notably, the servers that serve the deployed frontend never run npm (through Docker or otherwise). Instead, our deploy script runs npm locally to build the frontend, and then sends this compiled version to Google Cloud.

+Deployed code automatically builds using `.env.production`, since we call it with `npm run build`. See here for more information: https://create-react-app.dev/docs/adding-custom-environment-variables/#what-other-env-files-can-be-used
+
### One-time setup

#### AWS

We first need to register the subdomain.

This should create the subdomain `2021.battlecode.org` and point it to our load balancer.

+With this new subdomain registered, make sure to update the URLs in `.env.production` to this new URL.
+
#### Google Cloud

We now need to set up a new bucket, and set up the load balancer to point to the bucket when navigating to the right address.
diff --git a/frontend/src/api.js b/frontend/src/api.js
index 6683eb84..37dd960f 100644
--- a/frontend/src/api.js
+++ b/frontend/src/api.js
@@ -20,11 +20,11 @@ class Api {

    //uploads a new submission to the google cloud bucket
    static newSubmission(submissionfile, callback){
-        // submissionfile.append('_method', 'PUT');
-        // get the url from the real api
-        $.post(`${URL}/api/${LEAGUE}/submission/`, {
-            team: Cookies.get('team_id')
-        }).done((data, status) => {
+        // URLs that files are uploaded to are generated by the backend;
+        // call the backend api to get this link
+        $.post(`${URL}/api/${LEAGUE}/submission/`)
+        .done((data, status) => {
+            // Upload to the bucket
            console.log("got URL")
            Cookies.set('submission_id', data['submission_id']);
            $.ajax({
@@ -35,30 +35,26 @@ class Api {
                contentType: false
            })
            .done((data, status) => {
+                // After upload is done, need to queue for compilation.
+                // See corresponding method of backend/api/views.py for more explanation.
console.log(data, status) + $.post(`${URL}/api/${LEAGUE}/submission/` +Cookies.get('submission_id') + `/compilation_pubsub_call/`) + .done((data, status) => { + Cookies.set('upload_status_cookie', 11) + }) + .fail((xhr, status, error) => { + console.log("Error in compilation update callback: ", xhr, status, error) + Cookies.set('upload_status_cookie', 13) + }) }) - // Even when upload succeeds, an error is thrown... - // We make the dangerous assumption that the upload succeeded, - // ie that the submission exists in a bucket - // TODO this is a dangerous assumption, find a better solution - // (maybe revolving around the upload working error-free, - // and hooking callbacks to done rather than fail) - // TODO it's possible that the fail callback occurs - // before the upload finishes .fail((xhr, status, error) => { - // console.log(data); - $.post(`${URL}/api/${LEAGUE}/submission/` +Cookies.get('submission_id') + `/compilation_update/`, { - team: Cookies.get('team_id') - }).done((data, status) => { - console.log("Definitely done!") - // console.log(data, status) - Cookies.set('submitting', 0) - // TODO make this display done on screen - }) + console.log("Error in put request of file to bucket: ", xhr, status, error) + Cookies.set('upload_status_cookie', 13) }) - }).fail((xhr, status, error) => { - console.log("Error in post:", error) - + }) + .fail((xhr, status, error) => { + console.log("Error in post request for upload: ", xhr, status, error) + Cookies.set('upload_status_cookie', 13) }); } @@ -103,12 +99,6 @@ class Api { }); } - static getCompilationID(callback) { - $.get(`${URL}/api/${LEAGUE}/teamsubmission/${Cookies.get("team_id")}/team_compilation_id/`).done((data, status) => { - return data['compilation_id'] - }); - } - // note that this is a submission, not a teamsubmission, thing static getSubmissionStatus(callback) { $.get(`${URL}/api/${LEAGUE}/submission/${Cookies.get("submission_id")}/get_status/`).done((data, status) => { @@ -524,6 +514,14 @@ class Api { s[i].replay = undefined; } + if (s[i].status === 'won') { + s[i].score = `${s[i].winscore} - ${s[i].losescore}`; + } else if (s[i].status === 'lost') { + s[i].score = `${s[i].losescore} - ${s[i].winscore}`; + } else { + s[i].score = ' - '; + } + s[i].status = s[i].status.charAt(0).toUpperCase() + s[i].status.slice(1); s[i].date = new Date(s[i].updated_at).toLocaleDateString(); @@ -532,7 +530,6 @@ class Api { s[i].team = on_red ? s[i].blue_team : s[i].red_team; s[i].color = on_red ? 'Red' : 'Blue'; - requests.push(s[i]); } callback(requests); }); diff --git a/frontend/src/views/scrimmaging.js b/frontend/src/views/scrimmaging.js index eb3eeeed..8985a1e3 100755 --- a/frontend/src/views/scrimmaging.js +++ b/frontend/src/views/scrimmaging.js @@ -98,6 +98,7 @@ class ScrimmageHistory extends Component { Date Time Status + Score Team Ranked Replay @@ -122,6 +123,7 @@ class ScrimmageHistory extends Component { { s.date } { s.time } { stat_row } + { s.score } { s.team } { s.ranked ? 
"Ranked" : "Unranked"} { s.replay?Watch:N/A } diff --git a/frontend/src/views/submissions.js b/frontend/src/views/submissions.js index 350c579f..24db017d 100755 --- a/frontend/src/views/submissions.js +++ b/frontend/src/views/submissions.js @@ -13,6 +13,7 @@ class Submissions extends Component { super(props); this.state = { selectedFile: null, + currentSubmission: null, lastSubmissions: null, tourSubmissions: null, numLastSubmissions: 0, @@ -21,7 +22,7 @@ class Submissions extends Component { numTourLoaded: 0, user: {}, league: {}, - sub_status: -1 + upload_status: -1 }; Api.getUserProfile(function (u) { this.setState({ user: u }); @@ -30,7 +31,6 @@ class Submissions extends Component { } componentDidMount() { - Api.getCompilationStatus(this.gotStatus); Api.getTeamSubmissions(this.gotSubmissions); Api.getLeague(function (l) { this.setState({ league: l}); @@ -47,34 +47,44 @@ class Submissions extends Component { // makes an api call to upload the selected file uploadData = () => { - // let status_str = "Submitting..." - Cookies.set('submitting', 1) - // console.log("submitting...") - this.setState({sub_status: 0}) + // 'upload_status_cookie' in Cookies is used to communicate between the functions in api.js and those in submissions.js. It lets us keep track of the upload process for submissions, and all the http requests involved. (Note that this is different than a submission's compile_status in the database.) + // A value of 0 indicates that the submission is still in progress. + // When a submission finishes, api.js changes this value to something else. + Cookies.set('upload_status_cookie', 10) + // The upload_status state is used internally by this component. + // (Currently, it mirrors upload_status_cookie, but is part of state to make working with React easier.) + this.setState({upload_status: 10}) + + // Concurrent upload processes can be problematic; we've made the decision to disable concurrency. + // This is achieved by refreshing the submission upload components, which have buttons disabled while upload_status is 0. this.renderHelperSubmissionForm() this.renderHelperSubmissionStatus() Api.newSubmission(this.state.selectedFile, null) + // The method in api.js will change Cookies' upload_status_cookie during the process of an upload. + // To check changes, we poll periodically. this.interval = setInterval(() => { - if (Cookies.get('submitting') != 1) { - // console.log("out of time loop") + let upload_status_cookie_value = Cookies.get('upload_status_cookie'); + if (upload_status_cookie_value != 10) { + // Submission process terminated (see api.js). - // refresh the submission button and status - this.setState({sub_status: 1}) + // refresh the submission status, for use on this component + this.setState({upload_status: upload_status_cookie_value}) + + // refresh the submission button, etc, to allow for a new submission this.renderHelperSubmissionForm() this.renderHelperSubmissionStatus() - // refresh team submission listing + // refresh team submission tables, to display the submission that just occured Api.getTeamSubmissions(this.gotSubmissions); + this.renderHelperCurrentTable() this.renderHelperLastTable() + // Done waiting for changes to upload_status_cookie, so stop polling. 
clearInterval(this.interval) } - else { - // console.log("in time loop") - } - }, 1000); + }, 1000); // Poll every second } // change handler called when file is selected @@ -83,7 +93,7 @@ class Submissions extends Component { this.setState({ selectedFile: event.target.files[0], loaded: 0, - sub_status: -1 + upload_status: -1 }) this.renderHelperSubmissionForm() this.renderHelperSubmissionStatus() @@ -91,19 +101,15 @@ class Submissions extends Component { //---GETTING TEAMS SUBMISSION DATA---- + KEYS_CURRENT = ['compiling'] KEYS_LAST = ['last_1', 'last_2', 'last_3'] KEYS_TOUR = ['tour_final', 'tour_qual', 'tour_seed', 'tour_sprint', 'tour_hs', 'tour_intl_qual', 'tour_newbie'] - // called when status of teams compilation request is received - // 0 = in progress, 1 = succeeded, 2 = failed, 3 = server failed - gotStatus = (data) => { - this.setState(data) - } - // called when submission data is initially received // this will be maps of the label of type of submission to submission id // this function then makes calles to get the specific data for each submission gotSubmissions = (data) => { + this.setState({currentSubmission: new Array(this.submissionHelper(this.KEYS_CURRENT, data)).fill({})}) this.setState({lastSubmissions: new Array(this.submissionHelper(this.KEYS_LAST, data)).fill({})}) this.setState({tourSubmissions: new Array(this.submissionHelper(this.KEYS_TOUR, data)).fill([])}) } @@ -126,7 +132,13 @@ class Submissions extends Component { setSubmissionData = (key, data) => { let index, add_data - if (this.KEYS_LAST.includes(key)) { + if (this.KEYS_CURRENT.includes(key)) { + index = 0 + const arr = this.state["currentSubmission"] + let newArr = arr.slice(0, index) + newArr.push(data) + this.setState({["currentSubmission"]: newArr.concat(arr.slice(index + 1))}) + } else if (this.KEYS_LAST.includes(key)) { switch (key) { case 'last_1': index = 0 @@ -210,11 +222,12 @@ class Submissions extends Component { if (this.state.selectedFile !== null) { btn_class += " btn-info btn-fill" file_label = this.state.selectedFile["name"] - if (this.state.sub_status != 0) { + if (this.state.upload_status != 10) { button = } } - if (this.state.sub_status != 0) { + // Make sure to disable concurrent submission uploads. + if (this.state.upload_status != 10) { file_button_sub =
Choose File
file_button = @@ -241,7 +254,7 @@ class Submissions extends Component { {file_button} {file_button_2} {button} - {/*

{status_str}

*/} + {/*

{status_str}

*/} ) @@ -260,24 +273,23 @@ class Submissions extends Component { } } + // Shows the status of a current submission upload in progress. + // (see uploadData() for more explanation) renderHelperSubmissionStatus() { if (this.isSubmissionEnabled()) { let status_str = "" - switch (this.state.sub_status) { + switch (this.state.upload_status) { case -1: status_str = "Waiting to start submission..." break - case 0: + case 10: status_str = "Currently submitting..." break - case 1: - status_str = "Successfully submitted!" - break - case 2: - status_str = "Submission failed." + case 11: + status_str = "Successfully queued for compilation!" break - case 3: - status_str = "Internal server error. Try re-submitting your code." + case 13: + status_str = "Submitting failed. Try re-submitting your code." break default: status_str = "" @@ -287,14 +299,89 @@ class Submissions extends Component { return (
-

{status_str}

+

{status_str}

) } } - //reder helper for table containing the team's latest submissions + //render helper for table containing the team's latest submission + renderHelperCurrentTable() { if (this.state.currentSubmission === null) { return ( +

+ Loading submissions...

+

+ ) + } else if (this.state.currentSubmission.length == 0) { + return ( +

+ You haven't submitted any code yet! +

+ ) + } else { + const submissionRows = this.state.currentSubmission.map((submission, index) => { + if (Object.keys(submission).length === 0) { + return ( +
Loading...
+ ) + } else { + let status_str = "" + let download_button = + switch (submission.compilation_status) { + case 0: + status_str = "Submission initialized, but not yet uploaded... If this persists, try re-submitting your code. Also, make sure to stay on this page." + download_button = "" + break + case 1: + status_str = "Successfully submitted and compiled!" + break + case 2: + status_str = "Submitted, but compiler threw a compile error. Fix and re-submit your code." + break + case 3: + status_str = "Internal server error. Try re-submitting your code." + break + case 4: + status_str = "Code uploaded, but not yet queued for compilation... If this persists, try re-submitting your code." + break + case 5: + // TODO a dedicated refresh button, that refreshes only these tables, would be cool + status_str = "Code queued for compilation -- check back and refresh for updates." + break + default: + status_str = "" + break + } + return ( + + { (new Date(submission.submitted_at)).toLocaleString() } + { status_str } + { download_button } + + ) + } + }) + + return ( + + + + + + + + + { submissionRows } + +
Submission atStatus
+ ) } + } + + //render helper for table containing the team's latest successfully compiled submissions renderHelperLastTable() { if (this.state.lastSubmissions === null) { return (

) } else if (this.state.lastSubmissions.length == 0) { - if (this.state.status == 0) { - return ( -

- Your code is being submitted -- you'll see it here if it finishes successfully. -

- ) - } else { - return ( -

- You haven't submitted any code yet! -

- ) - } + return ( +

+ You haven't had any successful submissions yet! (If you have code being submitted, you'll see it here if it finishes successfully.) +

+ ) } else { const submissionRows = this.state.lastSubmissions.map((submission, index) => { if (Object.keys(submission).length === 0) { @@ -406,7 +485,14 @@ class Submissions extends Component { { this.renderHelperSubmissionStatus() }
-

Latest Submissions

+

Latest Submission

+
+
+ { this.renderHelperCurrentTable() } +
+ +
+

Latest Successfully Compiled Submissions

{ this.renderHelperLastTable() } diff --git a/infrastructure/worker/app/game_server.py b/infrastructure/worker/app/game_server.py index 32848cd4..425d3d59 100755 --- a/infrastructure/worker/app/game_server.py +++ b/infrastructure/worker/app/game_server.py @@ -11,12 +11,14 @@ from google.cloud import storage -def game_report_result(gametype, gameid, result): +def game_report_result(gametype, gameid, result, winscore=None, losescore=None): """Sends the result of the run to the API endpoint""" try: auth_token = util.get_api_auth_token() response = requests.patch(url=api_game_update(gametype, gameid), data={ - 'status': result + 'status': result, + 'winscore': winscore, + 'losescore': losescore }, headers={ 'Authorization': 'Bearer {}'.format(auth_token) }) @@ -179,9 +181,9 @@ def game_worker(gameinfo): game_log_error(gametype, gameid, 'Could not determine winner') else: if wins[0] > wins[1]: - game_report_result(gametype, gameid, GAME_REDWON) + game_report_result(gametype, gameid, GAME_REDWON, wins[0], wins[1]) elif wins[1] > wins[0]: - game_report_result(gametype, gameid, GAME_BLUEWON) + game_report_result(gametype, gameid, GAME_BLUEWON, wins[1], wins[0]) else: game_log_error(gametype, gameid, 'Ended in draw, which should not happen')