-
Notifications
You must be signed in to change notification settings - Fork 0
/
index.html
19460 lines (17968 loc) · 890 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <!-- Viewport meta so the slides scale correctly on mobile devices -->
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Brookstone Rover Land &amp; Sea</title>
  <style>
    /* Basic theme for remark slides */
    html {
      font-family: Helvetica, Arial, sans-serif;
    }
    body {
      color: #ffffff;
      background-color: #000000;
    }
    a {
      color: #00989e;
    }
    a:hover, a:focus {
      color: #00989e;
    }
    a:focus {
      background-color: #000000;
      color: #FFF;
    }
    strong {
      color: red;
    }
    .remark-slide-content {
      background-color: #000000;
    }
    /* All heading levels share the same accent color */
    .remark-slide-content h1,
    .remark-slide-content h2,
    .remark-slide-content h3,
    .remark-slide-content h4,
    .remark-slide-content h5,
    .remark-slide-content h6 {
      color: #00989e;
    }
    .remark-code-line {
      font-size: 10px;
      white-space: pre;
      background-color: #000;
      border: 1px solid #000;
      color: #fff;
      padding: 0 1em;
    }
    /* Fixed date stamp shown in the top-right corner of every slide */
    div.my-header {
      position: fixed;
      top: 0px;
      right: 5px;
      height: 30px;
      width: 100%;
      text-align: right;
      font-size: 10px;
      opacity: 0.5;
    }
  </style>
</head>
<body>
<textarea id="source">
layout: true
<div class="my-header">March 2018</div>
---
## Brookstone Rover Land & Sea
![Rover image](images/LandSea_Rover.jpg?raw=true "LandSea Rover")
---
### Outline
- Land & Sea Rover details
- $100 waterproof floating wheeled rover from **Brookstone**
- WiFi control using an Android or iPhone app
- camera swivels from forward facing to pointing down (underwater)
- powered by 6 AA disposable batteries with no charging jack
- Python packages for previous Brookstone rovers
- Who created them and how?
- Discovering the current rover details
- Repeating some of the same steps
- Using new sources of info
- Beginnings of ROS integration
- Measuring the speed using a 3D scanner and the Point Cloud Library
---
### Land & Sea Rover details
<style>.image0{width:auto;height:auto;min-width: 0%;max-width: 100%}</style>
<style>.image10{width:10%;height:auto;}</style>
<style>.image20{width:20%;height:auto;}</style>
<style>.image30{width:30%;height:auto;}</style>
<style>.image40{width:40%;height:auto;}</style>
<style>.image45{width:45%;height:auto;}</style>
<style>.image50{width:50%;height:auto;}</style>
<style>.image60{width:60%;height:auto;}</style>
<style>.image70{width:70%;height:auto;}</style>
<style>.image80{width:80%;height:auto;}</style>
<style>.image90{width:90%;height:auto;}</style>
<style>.image100{width:100%;height:auto;}</style>
<style>.image150{width:150%;height:auto;}</style>
<style>.small_text{font-size: 12px; }</style>
<style>.tiny_text{font-size: 10px; line-height: 1; }</style>
<table width="100%">
<tr>
<td align="center">
<img src="images/App_screenshot.png" class="image80">
</td>
<td align="center">
<img src="images/NetworkManagerApp.png" class="image60"><br>
<font class="small_text">(Helpful to discover network)</font>
</td>
</tr>
<tr>
<td align="center">
Rover app
</td>
<td align="center">
Network Manager Pro app
</td>
</tr>
</table>
<img src="images/NetworkDiagram.png" class="image100">
---
### Python Packages for Previous Rovers
With commits on GitHub ranging from 2014 to 2016 at https://github.com/simondlevy/RoverPylot, the drivers for previous Brookstone Rovers were created by **Professor Simon Levy** who teaches computer science at Washington & Lee University in Virginia where he frequently creates open source APIs for various hardware, such as the Parrot AR.Drone and the Neato XV-11.
His repository has Python code to control both the Brookstone **Rover 2.0** and the **Rover Revolution**, optionally using a PS3 controller.
<table width="100%">
<tr>
<td align="center">
<img src="images/roverpylot.png">
</td>
<td align="center">
<img src="images/revolution.jpeg">
</td>
</tr>
</table>
---
#### Old drivers don't work for the new Land Sea Rover
<style>.small_pre{font-size: 12px; color: red;}</style>
<table width="100%">
<tr>
<td align="center">
<img src="images/OldDriverClasses.png" class="image60">
</td>
<td align="left">
This error immediately<br>appears:
<pre class="small_pre">reply = self._receiveCommandReply(82)
socket.error: [Errno 104]
Connection reset by peer</pre>
</td>
</tr>
<tr>
<td align="left">
<pre><code>
class Rover:
def __init__(self):
''' Creates a Rover object that you can communicate with.'''
self.HOST = '192.168.1.100'
self.PORT = 80
TARGET_ID = 'AC13'
TARGET_PASSWORD = 'AC13'
self.TREAD_DELAY_SEC = 1.0
self.KEEPALIVE_PERIOD_SEC = 60
# Create command socket connection to Rover
self.commandsock = self._newSocket()
# Send login request with four arbitrary numbers
self._sendCommandIntRequest(0, [0, 0, 0, 0])
# Get login reply
reply = self._receiveCommandReply(82)
</code></pre>
</td>
<td align="left">
Both driver classes inherit from the Rover class, and the __init__() method of Rover unconditionally starts by sending zeros to port 80 of the Rover.
</td>
</tr>
</table>
---
### Discovering the new Land Sea Rover details
Professor Levy has a very detailed blog post titled *"How I hacked the Brookstone Rover 2.0"* at https://isgroupblog.blogspot.com/2013/09/how-i-hacked-brookstone-rover-20.html where he describes using the following tools:
#### APK to JAR converter
- **dex2jar** is available at http://code.google.com/p/dex2jar/
- d2j-dex2jar.sh -f Rover_Land_Sea_v6.1.0.0.1_apkpure.com.apk
#### Java decompiler
- He used the free trial version of the decompiler from **SecureTeam** http://secureteam.net/Java-Decompiler.aspx
- java -jar d4j.jar
#### Wireshark
- Packet capture tool to show the actual messages being passed between the Android app and the Rover.
---
#### Using the Java decompiler on the new Land Sea Rover apk file
<img src="images/d4j_screenshot.png" class="image70">
- Searching for the word "backward" found the code that controls the wheels. It _**actually calls a cgi program**_ named ptz_control.cgi at the Rover's IP address.
- Connecting to that IP address with a browser shows the login screen for a webcam.
- Searching through the Java source shows that the username and password are 'admin' and ''.
---
#### Land Sea Rover webcam GUI at http://192.168.1.1
<img src="images/ipcam_screenshot2.png" class="image80">
PTZ Settings appears in the webcam GUI and it is used to set up patrol zones of pre-defined camera movements. Apparently it was hijacked for Rover control.
Seeing "sosocam" and "reecam" mentioned in the Java source code and in the webcam GUI leads to a web search that results in Shenzhen Reecam, which is apparently a webcam software package widely used with drones...
---
#### Reecam and the Land Sea Rover
- The Reecam company maintains a documentation wiki that describes SOME of the API of ptz_control.cgi at http://wiki.reecam.cn/CGI/Controls . Other cgi commands like set_params, request_av, and get_log are also documented.
- There is a GitHub repository at https://github.com/larsks/mdcam with a Python tool named **mdcam**, which uses some of the Reecam interface to control MicroDrone 3.0 cameras.
- http://gw.tnode.com/drone/micro-drone-3-0-camera-api/ is a blog entry which points out that you can use the set_params command to enable **telnetd**, allowing you to log in to the Linux instance running on the Rover.
However, because some of the Rover operations are not documented by any of these sources, wireshark can help:
<table width="100%">
<tr>
<td align="center">
<img src="images/NetworkDiagramWireshark.png" class="image70">
</td>
</tr>
</table>
---
#### Wireshark and the Land Sea Rover
<img src="images/WiresharkScreenshot.png" class="image80">
Assuming your laptop was issued the IP 192.168.1.101 by the Rover, using the following filter:
<pre class="small_pre">
http.request.uri && ip.src == 192.168.1.101
</pre>
clearly shows the exact parameters being passed in the calls to **ptz_control.cgi** when using the webcam GUI (in this example) to rotate the camera UP. Other actions can also be captured by Wireshark and cross referenced against the decompiled Java code.
---
#### Motor Control for the Land Sea Rover
The Java code sends the undocumented command number **100** to **ptz_control.cgi** to control the motor speeds. It stores a 4 byte array to keep track of motion control:
- right wheels:
- **byte0** has the value 0 if the _right_ wheels should turn _backward_, and the value 1 if the _right_ wheels should turn _forward_
- **byte1** has the speed of the _right_ wheels, ranging from 0 indicating a stop to 255 being the highest speed.
- left wheels:
- **byte2** has the value 0 if the _left_ wheels should turn _backward_, and the value 1 if the _left_ wheels should turn _forward_
- **byte3** has the speed of the _left_ wheels, ranging from 0 indicating a stop to 255 being the highest speed.
To set the motor speed, the Java code assembles these 4 bytes into a single integer like this:
<pre class="small_pre">
(byte0 << 24) + (byte1 << 16) + (byte2 << 8) + byte3
</pre>
and sends **this integer** as the parameter value to **ptz_control.cgi** along with the command number **100** like this:
<pre class="small_pre">
http://192.168.1.1:80/ptz_control.cgi?param=17432837&command=100&pwd=&user=admin
</pre>
---
#### So, the basic Land Sea Rover driver architecture could be...
Like mdcam:
- the new driver uses the Python Requests library to make http calls and parse results
- there is no attempt to handle audio
Unlike mdcam:
- the new driver supports different specific commands due to the hardware
- slightly different logging so that integration with ROS can happen more easily
- mdcam uses the "click" library @click decorators to make methods available to the command line, while the new driver uses the inspect module to dynamically make driver methods command line accessible _(anyway, command line usage is not really important for a driver)_.
---
#### Logging in to the Land Sea Rover via telnet
Here is the output of **cat /proc/cpuinfo**
<pre class="small_pre">
Processor : ARM926EJ-S rev 5 (v5l)
BogoMIPS : 95.02
Features : swp half fastmult edsp java
CPU implementer : 0x41
CPU architecture: 5TEJ
CPU variant : 0x0
CPU part : 0x926
CPU revision : 5
Hardware : W55FA93
Revision : 0000
Serial : 0000000000000000
</pre>
Here is the beginning of the output of **cat /proc/kmsg**
<pre class="small_pre">
0 = 2
<4>div0 = 3
<4>div0 = 4
<4>Div1 = 0, Div0 = 3
<4>USBH IP Reset
<4>CONFIG_W55FA93_USB_HOST_LIKE_PORT1
<6>w55fa93-ohci w55fa93-ohci: Nuvoton W55FA93 OHCI Host Controller
<6>w55fa93-ohci w55fa93-ohci: new USB bus registered, assigned bus number 1
<6>w55fa93-ohci w55fa93-ohci: irq 18, io mem 0xb1009000
<4>ohci_w55fa93_start
<6>hub 1-0:1.0: USB hub found
<6>hub 1-0:1.0: 2 ports detected
<4>USB device plug in
</pre>
Searching for **"Nuvoton W55FA93"** gives...
---
#### Nuvoton and the Land Sea Rover
This PDF file: http://webshop.atlantikelektronik.de/Webpage/NuvotonPSG2011.pdf
shows:
<img src="images/Nuvoton_PDF.png" class="image80">
but it has no info about detecting battery charge levels. So a remotely operated Rover will die unexpectedly, but there's no way to recharge its 6 AA batteries anyway. It would be nice to have a 9v battery pack with Wi-Fi status reporting and Qi wireless recharging, maybe using a BeagleChip?
---
### ROS
- ROS is a range of **independent** software packages of cooperating **nodes** that run simultaneously as **processes** on one or more computers to control a robot.
<img src="images/ROS_diagram.png" class="image80">
---
### ROS
- ROS is a range of independent software packages of cooperating nodes that run simultaneously as processes on one or more computers to control a robot.
- The nodes communicate via **messages** that are organized into topics. Nodes can **publish** messages to a topic, and any interested node can **subscribe** to a topic so that it can also receive and act on those messages. *(Oversimplified)*
<img src="images/ROS_diagram.png" class="image80">
---
### ROS
- ROS is a range of independent software packages of cooperating nodes that run simultaneously as processes on one or more computers to control a robot.
- The nodes communicate via messages that are organized into topics. Nodes can publish messages to a topic, and any interested node can subscribe to a topic so that it can also receive and act on those messages. *(Oversimplified)*
- The central **roscore** node acts as a name server and coordinates the other nodes.
<img src="images/ROS_diagram.png" class="image80">
---
### ROS
- ROS is a range of independent software packages of cooperating nodes that run simultaneously as processes on one or more computers to control a robot.
- The nodes communicate via messages that are organized into topics. Nodes can publish messages to a topic, and any interested node can subscribe to a topic so that it can also receive and act on those messages. *(Oversimplified)*
- The central roscore node acts as a name server and coordinates the other nodes.
- There are easy command line interfaces and GUI interfaces for seeing what messages are **flowing** to what nodes, and for seeing the **rate** and **volume** of messages so that you can avoid **bottlenecks**.
<img src="images/ROS_diagram.png" class="image80">
---
### ROS
- ROS is a range of independent software packages of cooperating nodes that run simultaneously as processes on one or more computers to control a robot.
- The nodes communicate via messages that are organized into topics. Nodes can publish messages to a topic, and any interested node can subscribe to a topic so that it can also receive and act on those messages. *(Oversimplified)*
- The central roscore node acts as a name server and coordinates the other nodes.
- There are easy command line interfaces and GUI interfaces for seeing what messages are flowing to what nodes, and for seeing the rate and volume of messages so that you can avoid bottlenecks.
- There are competing platforms like OROCOS which apparently has the reputation of better realtime performance at the expense of easy **interoperability** with many 3rd party nodes. There is also an up and coming ROS-2 which focuses on **realtime** and swarming systems.
<img src="images/ROS_diagram.png" class="image80">
---
### ROS
- ROS is a range of independent software packages of cooperating nodes that run simultaneously as processes on one or more computers to control a robot.
- The nodes communicate via messages that are organized into topics. Nodes can publish messages to a topic, and any interested node can subscribe to a topic so that it can also receive and act on those messages. *(Oversimplified)*
- The central roscore node acts as a name server and coordinates the other nodes.
- There are easy command line interfaces and GUI interfaces for seeing what messages are flowing to what nodes, and for seeing the rate and volume of messages so that you can avoid bottlenecks.
- There are competing platforms like OROCOS which apparently has the reputation of better realtime performance at the expense of easy interoperability with many 3rd party nodes. There is also an up and coming ROS-2 which focuses on realtime and swarming systems.
- ROS-Industrial is a ROS subset that focuses on factory machines.
<img src="images/ROS_diagram.png" class="image80">
---
### ROS
<img src="images/ROS_diagram.png" class="image80">
It might be nice to create a **ROS node** for Land & Sea Rover. At a minimum the Rover node would need to act on messages that set its direction and its speed in meters per second (on land). Meters per second needs to be **converted to an integer** from 0 to 255 that can be sent as a parameter to ptz_control.cgi
---
### ROS tips
- Use Ubuntu, since ROS releases are tied to Ubuntu Long Term Support releases. I used Ubuntu 16.04, which works fine in VirtualBox, and which AWS is using in RoboMaker.
- **DO NOT INSTALL FROM THE UBUNTU/DEBIAN STANDARD REPOSITORIES!** Instead, add the ROS repository and install from there:
<pre class="small_pre">
sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" \
  > /etc/apt/sources.list.d/ros-latest.list'
</pre>
<pre class="small_pre">
sudo apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net \
    --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116
sudo apt-get update
</pre>
- I installed this ROS version: Kinetic Kame, again just like AWS RoboMaker.
- When recording bag files, always record the /tf (transform) topic as well, because without transform messages, ROS cannot know how to position your recorded data. Also record any other transform topics, just in case.
- Add --clock to "rosbag play" commands to avoid having ROS ignore all replayed messages because they are too far back in time. --clock makes rosbag the source of the current time.
---
#### Measuring the speed
4 ways to measure the speed:
- Tape measure and timer _(lowest speed is about 0.45 m/s)_
- Use a **strobe app** to measure the rotational speed of the wheels, and multiply by wheel circumference _(should start at about 5.4 Hz, based off tape measure)_
- Use an **audio spectrum analyzer app** to detect the rotational frequency of the wheels, and multiply by wheel circumference
- Use a **3d scanner** to route a realtime stream of point clouds into ROS as topic messages while the Rover travels through the scanner's field of view. ROS has easy facilities for capturing any stream of messages into a "bag" file for later examination.
- Then, you can read the bag data into **PCL**, the widely used **PointCloud library**, and isolate the Rover by taking differences between the frames.
- Finally, find the centroid of the remaining points for each frame (the remaining points are the Rover), and easily get the speed, because the frames are timestamped.
---
#### Using a strobe app: Strobily on Android
<img src="images/Rover_strobe.gif" class="image80">
- Discovered that sending parameter values greater than ABOUT 10 _**did not**_ increase the rotational frequency!
- Rotational frequency ranges from **5.5 Hz** for the parameter value **1**, up to about **9.3 Hz** for the parameter value **10**.
- Wheel circumference is 3.25" giving speeds from **0.45 m/s** to **0.77 m/s**
*(The frame rate of the gif is not high enough to accurately show the spinning)*
---
#### Using a spectrum analyzer app: Spectrum Analyzer on Android
<img src="images/spectrum_analyzer/collage.png" class="image100">
3 snapshots are shown above, but 10 were taken (1 for each parameter value from 1 to 10). Red graph is a rolling ~5 second max, green is instantaneous spectrum.
Typing the "Peak" numbers into **LibreOffice Calc** gives...
---
#### Using a spectrum analyzer app: massaging the data in LibreOffice
<img src="images/spectrum_analyzer/spreadsheet2.png" class="image80">
- Apparently, judging by sounds, the max rotational frequency seems to be **1.4** times the min frequency.
- But judging by strobe, the max rotational frequency seemed to be **1.7** times the min frequency.
- The parameter values close to 10 produce very similar rotational frequencies.
- Let's assume parameter value 1 causes a rotational frequency of 5.5 Hz as per the measurement by strobe.
---
#### Using a spectrum analyzer app: curve fitting in LibreOffice
Right clicking on the LibreOffice chart to add a trend line gives the equation shown:
<img src="images/spectrum_analyzer/instantaneous_scaled_log.png" class="image70">
- Rotational frequency as a function of CGI parameter value x
- **f(x) = 0.97 ln(x) + 5.6** (in Hz) _goes from 5.5 to 7.8 Hz_
- speed as a function of CGI parameter value x
- **s(x) = 0.08255 * ( 0.97 ln(x) + 5.6 )** (in m/s) _goes from 0.46 to 0.65 m/s_
---
#### Using a 3d scanner: Tango ROS Streamer App for Android v 1.3.1
<img src="images/tango_ros_streamer_diagram.png" class="image90">
- Install the Tango ROS Streamer App onto a Tango capable phone
- Configure it to talk to the roscore at 192.168.1.101 (the laptop)
- On the laptop, start **roscore** and then record the data with rosbag:
- **rosbag record /tf /tf_static /tango/point_cloud -O rover.bag**
- Message topics /tf and /tf_static are required by rviz to orient the pointclouds
---
#### Using a 3d scanner: primitive rover scan visualization with ROS RViz
<img src="images/rviz2/rviz_anim.gif" class="image100">
- After recording is done, play the recorded data back in a loop (-l) with rosbag
- **rosbag play -l --clock rover.bag** _(without --clock, rviz rejects as too old)_
- Visualize it with rviz (subscribe to /tf, /tf_static, and /tango/point_cloud)
- **rosrun rviz rviz**
---
#### Using a 3d scanner: manipulating the rover scans with Python
Using rosbag_to_ply.py, read the pointcloud messages into Python from the rosbag file and write each one out as an ASCII PLY file that many programs can read:
```python
# Convert every PointCloud2 message on a given rosbag topic into one ASCII
# PLY file per message, written into "<bagfile>.ply.d/".
# Usage: rosbag_to_ply.py <bagfile> <pointcloud-topic>
import sys, os, rosbag
import sensor_msgs.point_cloud2 as pc2

bagfile_path = sys.argv[1]
ply_topic = sys.argv[2]
ply_dir = bagfile_path + ".ply.d"
bag = rosbag.Bag(bagfile_path)
if not os.path.isdir(ply_dir):
    os.mkdir(ply_dir)
# Loop over all messages in the bag for this topic
for msg_topic, msg, msg_time in bag.read_messages(topics=[ply_topic]):
    # Materialize the vertex list first: skip_nans=True may drop points,
    # so the PLY header count must come from the filtered list rather than
    # msg.width (otherwise the header can declare more vertices than the
    # file actually contains).
    vertices = list(pc2.read_points(msg, skip_nans=True))
    ply_file_path = os.path.join(
        ply_dir, "%s.%s.ply" % (msg_topic.replace("/", "_"), str(msg_time)))
    with open(ply_file_path, "w") as ply_file:  # One PLY file per message
        ply_file.write("ply\nformat ascii 1.0\nelement vertex %d\n" % len(vertices))
        ply_file.write("property float x\nproperty float y\nproperty float z\nproperty float c\n")
        ply_file.write("end_header\n")
        for vertex in vertices:
            ply_file.write("%f %f %f %f\n" % vertex)  # One line per vertex
```
Run it like this:
**rosbag_to_ply.py   2018-04-01-13-35-34.bag   /tango/point_cloud**
...to make files named like this, where the number is the pointcloud timestamp:
**2018-04-01-13-35-34.bag.ply.d/_tango_point_cloud.1522026206919802769.ply**
---
#### Using a 3d scanner: lame rover scan visualization with CloudCompare
Open the PLY files in the CloudCompare application _("snap install cloudcompare")_:
<img src="images/CloudCompare.png" class="image100">
It is very slow, and sadly it uses trackball style manipulators. Cumbersome.
- I opened 42 pointclouds with about 20,000 points each.
---
#### Using a 3d scanner: lame rover scan visualization with MeshLab
Open the PLY files in the MeshLab application _("apt-get install meshlab")_:
<img src="images/MeshLab.png" class="image100">
Also slow, and unfortunately with a trackball style manipulator again.
- Also 42 pointclouds with about 20,000 points each.
---
#### Using a 3d scanner: better rover scan visualization with Blender
Open the PLY files in Blender:
<img src="images/Blender.png" class="image100">
Much faster after loading the same files, plus it defaults to turntable navigation. Tabbing into edit mode and selecting a vertex on the rover shows it is 1.9 meters from the camera, which is correct. But there are too many other vertices besides rover vertices. Let's use the Point Cloud Library to remove them...
---
### PCL - Point Cloud Library
- Like OpenCV but for 3D; started as a part of ROS, now independent
- C++ native API, but partial Python API available from Strawlab
- Modules for I/O, filters, feature extraction, visualization, partitioning by octree and kdtree (k-dimensional tree)
- octrees are great for comparing pointclouds and finding differences - just what we need!
- But the API for converting ROS PointCloud2 messages to PCL Pointclouds is not straightforward in Python
Building the PCL library
- Download release v0.3.0rc1 from https://github.com/strawlab/python-pcl/releases
- <pre class="small_pre">
tar xzvf python-pcl-0.3.0rc1.tar.gz
</pre>
- <pre class="small_pre">
cd python-pcl-0.3.0rc1
</pre>
- <pre class="small_pre">
python setup.py build_ext -i
</pre>
- <pre class="small_pre">
python setup.py install
</pre>
---
#### Loading PCL from a PLY file:
```python
>>> import pcl
>>> pc_from_ply = pcl.load('_tango_point_cloud.1522604147114629819.ply')
>>> pc_from_ply.sensor_orientation , pc_from_ply.sensor_origin
(array([ 1., 0., 0., 0.]), array([ 0., 0., 0., 0.], dtype=float32))
>>> pc_from_ply.size , pc_from_ply.height , pc_from_ply.width , pc_from_ply.is_dense
(21040, 1, 21040, False)
>>> pc_from_ply[0]
(-0.8809239864349365, -0.9873319864273071, 2.2988979816436768)
>>> numpy_to_array = pc_from_ply.to_array()
>>> numpy_to_array
array([[-0.86157399, -0.978643 , 2.27972507], ...,
[ 0.088902 , 0.081311 , 0.186314 ]], dtype=float32)
>>> len(numpy_to_array)
21040
>>> numpy_to_array.shape
(21040, 3)
>>> pc_from_array = pcl.PointCloud()
>>> pc_from_array.from_array(numpy_to_array)
>>> pc_from_array.sensor_orientation , pc_from_array.sensor_origin
(array([ 1., 0., 0., 0.]), array([ 0., 0., 0., 0.], dtype=float32))
>>> pc_from_array.size , pc_from_array.height , pc_from_array.width , pc_from_array.is_dense
(21040, 1, 21040, True)
>>> pc_from_array[0]
(-0.8615739941596985, -0.978643000125885, 2.2797250747680664)
```
---
#### Loading PCL from ROS messages: C++ has pcl::fromROSMsg(), not Python!
```python
>>> import rosbag, pcl, numpy
>>> bag = rosbag.Bag('2018-04-01-13-35-34.bag') ; get = bag.read_messages( topics=['/tango/point_cloud'])
>>> msg_topic , msg , msg_time = get.next() ; msg.height, msg.width
(1, 21040)
>>> dtype_list = [ (f.name, numpy.float32) for f in msg.fields ] ; dtype_list
[('x', <type 'numpy.float32'>), ('y', <type 'numpy.float32'>), ('z', <type 'numpy.float32'>),
('c', <type 'numpy.float32'>)]
>>> numpy_arr_fromstring_data = numpy.fromstring( msg.data, dtype_list ) ; numpy_arr_fromstring_data.shape
(21040,)
>>> numpy_arr_fromstring_data # We need to drop the color column 'c'
array([(-0.8615744709968567, -0.9786433577537537, 2.2797253131866455, 0.5714285969734192), ...,
(0.08890211582183838, 0.08131082355976105, 0.18631358444690704, 1.0)],
dtype=[('x', '<f4'), ('y', '<f4'), ('z', '<f4'), ('c', '<f4')])
>>> dropped_c = numpy.lib.recfunctions.rec_drop_fields(numpy_arr_fromstring_data, ['c']) ; dropped_c
rec.array([(-0.8615744709968567, -0.9786433577537537, 2.2797253131866455), ...,
(0.08890211582183838, 0.08131082355976105, 0.18631358444690704)],
dtype=[('x', '<f4'), ('y', '<f4'), ('z', '<f4')])
>>> view_array = dropped_c.view((dropped_c.dtype[0], len(dropped_c.dtype.names))) ; view_array
rec.array([[-0.86157447, -0.97864336, 2.27972531], ...,
[ 0.08890212, 0.08131082, 0.18631358]], dtype=float32)
>>> view_array.shape , view_array[0] # (N, 3) is the right shape for PCL
((21040, 3), array([-0.86157447, -0.97864336, 2.27972531], dtype=float32))
>>> pc_from_array = pcl.PointCloud() ; pc_from_array.from_array(view_array)
>>> pc_from_array.sensor_orientation , pc_from_array.sensor_origin
(array([ 1., 0., 0., 0.]), array([ 0., 0., 0., 0.], dtype=float32))
>>> pc_from_array.size , pc_from_array.height , pc_from_array.width , pc_from_array.is_dense
( 21040, 1, 21040, True)
>>> pc_from_array[0]
(-0.8615744709968567, -0.9786433577537537, 2.2797253131866455)
```
---
#### Loading PCL: Which method is better?
- **plys_to_pcls.py** loads from PLY files, and it reports:
<pre class="small_pre">
64 pointclouds with an average of 21037 points per cloud
Original memory size: 62246912 bytes
Final memory size: 84439040 bytes
Memory growth: 22192128 bytes
Elapsed time: 3.30724596977 seconds
(346752 bytes per pointcloud, 0.051676 seconds per pointcloud)
Total available RAM: 2625716224 bytes
2 CPUS like this: Intel(R) Core(TM) i5 CPU 650 @ 3.20GHz
</pre>
- **rosbag_to_pcls.py   2018-04-01-13-35-34.bag   /tango/point_cloud** <br>loads from ROS messages, and it reports:
<pre class="small_pre">
64 pointclouds with an average of 21037 points per cloud
Original memory size: 100044800 bytes
Final memory size: 124366848 bytes
Memory growth: 24322048 bytes
Elapsed time: 0.080677986145
(380032 bytes per pointcloud, 0.00126059353352 seconds per pointcloud)
Total available RAM: 2625716224 bytes
2 CPUS like this: Intel(R) Core(TM) i5 CPU 650 @ 3.20GHz
</pre>
- Loading straight from ROS messages is about 40 times faster, and uses nearly the same RAM per pointcloud.
---
#### Using a 3d scanner: Identifying moving vertices between frames
PCL octrees can easily identify points that changed between frames:
<pre class="small_pre">
PYTHON-PCL-SOURCE-DIR/examples/official/octree/octree_change_detection.py
</pre>
But comparing only adjacent frames that contain slow motion could mis-identify overlapping points from a moving object as stationary, especially if the octree resolution is coarse enough to overlook jitter. We must give the moving object time to get out of the way.
<table width="100%">
<tr>
<td align="center">
<img src="images/overlapping_moving_vertices/annotated_overlapping_moving_vertices.gif" class="image90">
</td>
<td width="40%">So, compare every frame to the nearest J adjacent frames (not just the immediately adjacent frames), and keep a per-frame, per-point tally of how many times each point was identified as a moving point.<br><br>
At the end, points that were never considered to be moving points are truly stationary points.
</td>
</tr>
</table>
After the NxJ octree comparisons, cull stationary points from each frame to be left with only moving points in each frame, then calculate centroid deltas across frames.
---
<font class="small_text">
I'm sorry, there should be more, but I got distracted by other fun stuff:<br>
- Pandas, Parquet, AWS Athena, AWS Cloud9 custom dialogs and menus, Jupyter, JupyterHub, Cufflinks, IPython SQLMagic... <br>
- statsmodels and matplotlib <img src="images/Least_Squares_by_weekday.png" class="image100"><br>
- Facebook's fbprophet forecasting library <img src="images/rowcounts_by_date.png" class="image100"><br>
<br>
<br>
... and ...
</font>
---
<font class="small_text">
I'm sorry, there should be more, but I got distracted by other fun stuff:<br>
- MayaVI <br> <video src="images/Package_shipments.mp4" controls preload></video><br>
- Jupyter and Cufflinks
</font>
---
<font class="small_text">
I'm sorry, there should be more, but I got distracted by other fun stuff:<br>
</font>
Plotly 3D charts of the AWS Athena PrestoDB product
<a target="_blank" href="20180805_athena_query_chart_masked.html"><img title="(click for more)" src="images/Plotly_3D_Athena.png" class="image100"></a>
---
<font class="small_text">
I'm sorry, there should be more, but I got distracted by other fun stuff:<br></font>
<p class="tiny_text">
<font class="tiny_text">
A year or so before the recent fantastic JupyterLab git plugins were available, I combined existing classic Jupyter
git plugins along with my own: <b>Resync Git Subtree,</b> <b>Update Git Subtree</b>, and <b>REVERT FILE to Git</b> allow full or partial updates of a user's personal git working tree, which is configured to be at
the beginning of the user's PYTHONPATH, allowing users to selectively override shared code with their own test code, before checking in changes to the shared code. <b>DIFF two notebooks</b> invokes <b>nbdiff</b> on
two arbitrary notebooks that were selected with checkmarks in the Jupyter file tree.<br>
<br><img src="images/git_maintain_jupyter_plugin.png" class="image100">
<br><br>A 3rd <b>history</b> button was added to the standard <b>nbdime</b> buttons in the Jupyter notebook toolbar...<br>
<img src="images/nbdiff_plugin_modifications.png" class="image0" width="332" height="118">
...which leads to the simple gitweb GUI that comes with git:<br><br>
<img src="images/checkin_history.png" class="image90">
</font></p>
---
<font class="small_text">
I'm sorry, there should be more, but I got distracted by other fun stuff:<br>
</font>
IPyWidgets for N*N-way diffs between SQL schemas or AWS IAM policies across multiple sub-accounts:
<font class="small_text"><br>Using Python difflib edit distance (similar to Levenshtein distance) to color the icons on a ramp from 0.0 = green to 1.0 = red<br><br>
Clicking on a colored icon populates the diff display below the interactive 3D chart.</font><br>
<br><br><a target="_blank" href="images/screenshot_of_ddl_diff_3d_heatmap.png"><img title="(click for more)" src="images/screenshot_of_ddl_diff_3d_heatmap.png" class="image45"></a>
<a target="_blank" href="images/screenshot_of_iam_policy_diff_3d_heatmap.png"><img title="(click for more)" src="images/screenshot_of_iam_policy_diff_3d_heatmap.png" class="image45"></a>
---
<font class="small_text">
I'm sorry, there should be more, but I got distracted by other fun stuff:<br>
</font>
Running ETL through Jupyter notebooks in batch mode on AWS spot instances under Airflow:
<font class="small_text"><br>Using the Papermill library with a custom engine to only execute cells with the tag <b>"batch"</b>, each notebook gets executed as an Airflow <b>Task</b> on its own AWS EC2 spot instance.</font><br><br>
<br><br><a target="_blank" href="Background_info_for_ascii_processing_using_sql.html"><img title="(click for more)" src="Background_info_for_ascii_processing_using_sql.d/our_jupyter_airflow_screenshot1.png" class="image90"></a>
</textarea>
<script>require=(function(e,t,n){function i(n,s){if(!t[n]){if(!e[n]){var o=typeof require=="function"&&require;if(!s&&o)return o(n,!0);if(r)return r(n,!0);throw new Error("Cannot find module '"+n+"'")}var u=t[n]={exports:{}};e[n][0].call(u.exports,function(t){var r=e[n][1][t];return i(r?r:t)},u,u.exports)}return t[n].exports}var r=typeof require=="function"&&require;for(var s=0;s<n.length;s++)i(n[s]);return i})({"components/printing":[function(require,module,exports){
module.exports=require('yoGRCZ');
},{}],"yoGRCZ":[function(require,module,exports){
// Event emitter base class and the styler helper (whose setPageSize
// is called below to publish the printed page dimensions).
var EventEmitter = require('events').EventEmitter
, styler = require('components/styler')
;
// Orientation names and the slide page size in pixels
// (landscape: 908 wide x 681 high; swapped for portrait).
var LANDSCAPE = 'landscape'
, PORTRAIT = 'portrait'
, PAGE_HEIGHT = 681
, PAGE_WIDTH = 908
;
// Component that watches the browser's print state and re-emits it as a
// 'print' event carrying the configured page geometry (see onPrint).
function PrintComponent () {}
// Add eventing
// NOTE: the prototype is replaced wholesale, so this assignment must run
// before the method assignments that follow.
PrintComponent.prototype = new EventEmitter();
// Sets up listener for printing
// Defaults the deck to landscape, then subscribes to the 'print' media
// query. Returns false (and installs no listener) when the browser has
// no window.matchMedia; no print events will ever fire in that case.
PrintComponent.prototype.init = function () {
var self = this;
this.setPageOrientation(LANDSCAPE);
if (!window.matchMedia) {
return false;
}
// addListener fires the callback on every state change of the query,
// both entering and leaving print mode; onPrint filters on e.matches.
window.matchMedia('print').addListener(function (e) {
self.onPrint(e);
});
};
// Handles printing event
// Handles printing event
// When the 'print' media query starts matching, emits a 'print' event
// with the page geometry chosen by setPageOrientation.
// e: media query change object; e.matches is true while print mode is active.
// (Fix: dropped the unused local `slideHeight` declared by the original.)
PrintComponent.prototype.onPrint = function (e) {
  // Ignore the notification fired when leaving print mode.
  if (!e.matches) {
    return;
  }
  this.emit('print', {
    isPortrait: this._orientation === 'portrait'
    , pageHeight: this._pageHeight
    , pageWidth: this._pageWidth
  });
};
// Records the requested page orientation and applies the matching pixel
// dimensions, both on this component and through the styler helper.
// Accepts only 'portrait' or 'landscape'; any other value throws.
PrintComponent.prototype.setPageOrientation = function (orientation) {
  switch (orientation) {
    case PORTRAIT:
      // Portrait is landscape with the two dimensions swapped.
      this._pageHeight = PAGE_WIDTH;
      this._pageWidth = PAGE_HEIGHT;
      break;
    case LANDSCAPE:
      this._pageHeight = PAGE_HEIGHT;
      this._pageWidth = PAGE_WIDTH;
      break;
    default:
      throw new Error('Unknown print orientation: ' + orientation);
  }
  this._orientation = orientation;
  styler.setPageSize(this._pageWidth + 'px ' + this._pageHeight + 'px');
};
// Export singleton instance
module.exports = new PrintComponent();
},{"events":1,"components/styler":"syTcZF"}],"components/slide-number":[function(require,module,exports){
module.exports=require('GSZq7a');
},{}],"GSZq7a":[function(require,module,exports){
module.exports = SlideNumberViewModel;
// View model for the per-slide page-number badge: builds a div with the
// 'remark-slide-number' class whose text comes from formatSlideNumber.
function SlideNumberViewModel (slide, slideshow) {
  this.slide = slide;
  this.slideshow = slideshow;
  var badge = document.createElement('div');
  badge.className = 'remark-slide-number';
  badge.innerHTML = formatSlideNumber(slide, slideshow);
  this.element = badge;
}
// Renders the slide-number label. The slideshow-level format is either a
// function (invoked with current/total) or a template string containing
// the %current% and %total% placeholders.
function formatSlideNumber (slide, slideshow) {
  var current = getSlideNo(slide, slideshow);
  var slides = slideshow.getSlides();
  var total = getSlideNo(slides[slides.length - 1], slideshow);
  var format = slideshow.getSlideNumberFormat();
  return typeof format === 'function'
    ? format.call(slideshow, current, total)
    : format.replace('%current%', current).replace('%total%', total);
}
// Returns the 1-based displayed number of `slide`: the count of slides up
// to and including it whose `count` property is not the string 'false'.
// Never returns less than 1, even when every slide so far is uncounted.
function getSlideNo (slide, slideshow) {
  var all = slideshow.getSlides();
  var last = Math.min(slide.getSlideIndex(), all.length - 1);
  var counted = 0;
  for (var idx = 0; idx <= last; ++idx) {
    if (all[idx].properties.count !== 'false') {
      counted += 1;
    }
  }
  return counted < 1 ? 1 : counted;
}
},{}],2:[function(require,module,exports){
// shim for using process in browser
var process = module.exports = {};
process.nextTick = (function () {
var canSetImmediate = typeof window !== 'undefined'
&& window.setImmediate;
var canPost = typeof window !== 'undefined'
&& window.postMessage && window.addEventListener
;
if (canSetImmediate) {
return function (f) { return window.setImmediate(f) };
}
if (canPost) {
var queue = [];
window.addEventListener('message', function (ev) {
var source = ev.source;
if ((source === window || source === null) && ev.data === 'process-tick') {
ev.stopPropagation();
if (queue.length > 0) {
var fn = queue.shift();
fn();
}
}
}, true);
return function nextTick(fn) {
queue.push(fn);
window.postMessage('process-tick', '*');
};
}