# ftnexport.py
"""
Message flow from database to links and users
"""
import xml.etree.ElementTree
import io
import traceback
import time
import zipfile
import os
import functools
import json
from ftnconfig import suitable_charset, find_link, \
ADDRESS, PACKETTHRESHOLD, BUNDLETHRESHOLD, get_addr_id, get_addr, DOUTBOUND, addrdir, connectdb, \
EXPORTLOCK, PKTLOCK, BUNDLELOCK, TICLOCK, get_link_packing, HOSTNAME, \
BUNDLETIMELIMIT, PACKETTIMELIMIT
import ftnimport
import ftn.msg
import ftn.attr
import ftn.pkt
import ftn.addr
from ftn.ftn import FTNFail, FTNWrongPassword, date_to_RFC3339
from stringutil import *
import postgresql.alock
import ftnaccess
import ftntic
import lo
get_time = time.time
# AUTOCOMMIT assumed
def get_subscribers(db, target, onlyvital=False):
""" query all subscribers including that who are subscribed to group of the target """
for r in db.prepare("""
with recursive all_groups(id, level) as
(
select $1::BIGINT, 0
union
select a.group, g.level+1 from addresses a, all_groups g where a.id = g.id
)
select s.subscriber, s.vital, g.level from subscriptions s, all_groups g
where s.target = g.id""" + (" and s.vital = True" if onlyvital else ""))(target):
yield r
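# Note on the query above: the recursive CTE walks up the addresses.group chain starting
# from the target, so subscriptions attached to any group the target belongs to are
# returned as well; s.vital and g.level (the distance from the target) are reported with
# each subscriber so callers can filter on them.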
def get_addrtree(db, addr):
for a in db.prepare("""
with recursive all_nested(id, level) as
(
select $1::BIGINT, 0
union
select a.id, n.level+1 from addresses a, all_nested n where a.group = n.id
)
select id from all_nested where level>0""")(addr):
yield a
def get_subscriber_messages_n_trashy(db, subscriber, domain):
""" get all subscribed addresses in specified domain and
fetch all messages with id>lastsent or if lastsent is None - with processed==0
Non-vital netmail subscription should be processed as echomail """
try:
query = db.Q_get_subscriber_messages_n
except AttributeError:
query = db.Q_get_subscriber_messages_n = db.prepare("""
with recursive allsubscription(id, target, dir) as
(
select s.id, s.target, 1 from subscriptions s, addresses a
where s.subscriber=$1 and s.vital=TRUE and s.target=a.id and a.domain=$2
Union
select s.id, a.id, 0 from allsubscription s, addresses a
where a.group = s.target
and (select count(id) from subscriptions where target=a.id) = 0
and a.domain = $2
)
select m.id, m.source, m.destination, m.msgid, m.header, m.body, m.origcharset, m.receivedfrom
from allsubscription alls, messages m
where m.processed=0 and m.destination=alls.target
order by m.id
;
""")
for m in query(subscriber, domain):
yield m
def get_subscriber_messages_n(db, subscriber, domain): # loopy
""" get all subscribed addresses in specified domain and
fetch all messages with id>lastsent or if lastsent is None - with processed==0
Non-vital netmail subscription should be processed as echomail """
# To avoid this recursive subgroup scanning we would have to maintain a table of
# (msg, addr, groupaddr) entries, where addr is every address that is (transitively) a
# group of the message's destination address and groupaddr is kept as an extra element
# to make updating easy. That table would have to be rebuilt whenever the grouping changes,
# OR we would have to track changes of the addresses' group field and add/delete pairs accordingly.
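# A minimal sketch of that idea (hypothetical table name and columns, not part of the
# current schema): precompute the routing pairs once and keep them up to date from the
# import path or from triggers on addresses.group, e.g.
#
#   create table msg_routing (
#       msg        bigint,   -- messages.id
#       addr       bigint,   -- destination address or any of its ancestor groups
#       groupaddr  bigint    -- the immediate group, kept to make updates cheap
#   );
#
# Export would then join messages against msg_routing instead of re-running the
# recursive CTE for every subscriber.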
for subs_id, subs_target, is_direct in db.prepare("""
with recursive allsubscription(id, target, dir) as
(
select s.id, s.target, 1 from subscriptions s, addresses a
where s.subscriber=$1 and s.vital=TRUE and s.target=a.id and a.domain=$2
Union
select s.id, a.id, 0 from allsubscription s, addresses a
where a.group = s.target
and (select count(id) from subscriptions where target=a.id) = 0
and a.domain = $2
)
select * from allsubscription """)(subscriber, domain):
# print (db.prepare("select text from addresses where id=$1").first(subs_target), db.prepare("select count(*) from messages where destination=$1 and processed=0").first(subs_target))
for m in db.prepare("""select m.id, m.source, m.destination, m.msgid, m.header, m.body, m.origcharset, m.receivedfrom from messages m
where m.processed=0 and m.destination=$1""")(subs_target):
yield m
def get_direct_messages(db, subscriber):
try:
query = db.Q_get_direct_messages
except AttributeError:
query = db.Q_get_direct_messages = db.prepare("""
select m.id, m.source, m.destination, m.msgid, m.header, m.body, m.origcharset, m.receivedfrom
from messages m
where m.processed=8 and m.destination=$1
order by m.id
;
""")
for m in query(subscriber):
yield m
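# "processed" values used in this module, as far as they can be inferred from this file:
# 0 = not yet exported, 2 = netmail exported (netmailcommitter default), 5 = archived
# (skipped by the echomail export), 7 = direct netmail exported, 8 = direct netmail
# waiting to be picked up by its destination.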
# SET cpu_tuple_cost = 0.2; set enable_hashjoin=false; set enable_mergejoin=false;
def get_subscriber_messages_e_heavy(db, subscriber, domain):
""" get all subscribed addresses in specified domain and
fetch all messages with id>lastsent or if lastsent is None - with processed==0 """
try:
query = db.Q_get_subscriber_messages_e
except AttributeError:
# it seems the check that nobody is subscribed to a subgroup is not needed for echomail
# and should be removed
query = db.Q_get_subscriber_messages_e = db.prepare("""
with recursive allsubscription(id, lastsent, target, dir) as
(
select id, lastsent, target, 1 from subscriptions where subscriber=$1
Union
select s.id, s.lastsent, a.id, 0 from allsubscription s, addresses a
where a.group = s.target
)
select m.id, m.source, m.destination, m.msgid, m.header, m.body, m.origcharset, m.receivedfrom, alls.id
from allsubscription alls, addresses sa, messages m
where sa.id=alls.target and sa.domain=$2 and
m.id>alls.lastsent and m.destination=alls.target and
m.receivedfrom<>$1 and m.processed<>5
order by m.id
;
""")
for m in query(subscriber, domain):
yield m
#id_msg, src, dest, msgid, header, body, recvfrom, withsubscr
def get_subscriber_messages_e(db, subscriber, domain):
start_time = get_time()
for target_id, target_name, target_last, subs_id, subs_last in get_subscriptions_x(db, subscriber, domain):
if get_time()-start_time>BUNDLETIMELIMIT-5:
print("Abandoning do to export time limit")
break
if target_last>subs_last:
print ("something new", target_id, target_name, target_last, subs_id, subs_last)
for mid,msrc,mdst,mmsgid,mhdr,mbody,mchr,mrecvfrom,mproc in db.prepare(
"select m.id, m.source, m.destination, m.msgid, m.header, "
"m.body, m.origcharset, m.receivedfrom, m.processed from messages m where destination=$1 and id>$2 order by id")(target_id, subs_last):
yield mid, msrc, mdst, mmsgid, mhdr, mbody, mchr, mrecvfrom, subs_id, mproc
def _get_messages(db, dest_id, lastsent):
""" use lastsent >= -1 for fetching echomail
or lastsent = None for netmail """
try:
Q_get_subscription_messages_e = db.Q_get_subscription_messages_e
Q_get_subscription_messages_n = db.Q_get_subscription_messages_n
except AttributeError:
Q_get_subscription_messages_e = db.Q_get_subscription_messages_e = db.prepare(
"select m.id, s.domain, s.text, m.msgid, m.header, m.body, m.receivedfrom "
"from messages m, addresses s "
"where m.id>$2 and m.destination=$1"
"and m.source=s.id")
Q_get_subscription_messages_n = db.Q_get_subscription_messages_n = db.prepare(
"select m.id, s.domain, s.text, m.msgid, m.header, m.body, m.receivedfrom "
"from messages m, addresses s "
"where m.destination=$1 and m.processed=0"
"and m.source=s.id")
if lastsent is not None:
# echomail-style
for m in Q_get_subscription_messages_e(dest_id, lastsent):
yield m[0], db.FTN_backdomains[m[1]], m[2], m[3], m[4], m[5], m[6]
else:
# netmail-style
for m in Q_get_subscription_messages_n(dest_id):
yield m[0], db.FTN_backdomains[m[1]], m[2], m[3], m[4], m[5], m[6]
return
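# _get_messages covers both watermark styles used in this module: echomail tracks a
# per-subscription lastsent message id, netmail tracks the per-message processed flag.
# Illustrative calls (the ids are placeholders):
#
#   _get_messages(db, echo_area_id, lastsent)    # echomail: everything newer than lastsent
#   _get_messages(db, node_address_id, None)     # netmail: everything with processed == 0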
def update_subscription_watermark(db, subscription, id):
db.prepare("update subscriptions set lastsent=$1 where id=$2")(id, subscription)
def denormalize_message(orig, dest, msgid, header, body, charset, echodest=None, addvia=None, addseenby=[], addpath=None):
(origdom, origaddr) = orig
(destdom, destaddr) = dest
if charset is None:
# not imported message
overwriteCHRS = True
charset = suitable_charset(None, None, "encode", origdom, origaddr, destdom, destaddr) or 'utf-8'
else:
overwriteCHRS = False
#print(charset)
if origdom!="node":
raise FTNFail("message source must be node not %s"%origdom)
msg=ftn.msg.MSG()
nltail=len(body)
while(nltail>0 and body[nltail-1]=="\n"):
nltail-=1
try:
fname=(header.find("sendername").text or '').encode(charset)
tname=(header.find("recipientname").text or '').encode(charset)
msg.subj=(unclean_str(header.find("subject").text or '')).encode(charset)
msg.date=(header.find("date").text or '').encode(charset)
msg.body = body[:nltail].encode(charset).split(b"\n")
except UnicodeEncodeError:
try:
charset = "cp437"
fname=(header.find("sendername").text or '').encode(charset)
tname=(header.find("recipientname").text or '').encode(charset)
msg.subj=(unclean_str(header.find("subject").text or '')).encode(charset)
msg.date=(header.find("date").text or '').encode(charset)
msg.body = body[:nltail].encode(charset).split(b"\n")
except UnicodeEncodeError:
charset = "utf-8"
fname=(header.find("sendername").text or '').encode(charset)
tname=(header.find("recipientname").text or '').encode(charset)
msg.subj=(unclean_str(header.find("subject").text or '')).encode(charset)
msg.date=(header.find("date").text or '').encode(charset)
msg.body = body[:nltail].encode(charset).split(b"\n")
#print(fname, tname, subj, date)
#print(xml.etree.ElementTree.tostring(header, encoding="utf-8").decode("utf-8"))
ftnheader=header.find("FTN")
# print(xml.etree.ElementTree.tostring(ftnheader).decode("utf-8"))
msg.kludge = {}
for kludge in ftnheader.findall("KLUDGE"):
#print(kludge.get("name"), kludge.get("value"))
msg.kludge[kludge.get("name").encode(charset)] = kludge.get("value").encode(charset)
# overwrite CHRS kludge
if overwriteCHRS:
if charset=="fido_relics":
if b"CHRS:" in msg.kludge:
del msg.kludge[b"CHRS:"]
elif charset!="utf-8":
msg.kludge[b"CHRS:"] = (charset.upper() + " 2").encode("ascii")
else:
msg.kludge[b"CHRS:"] = (charset.upper() + " 4").encode("ascii")
# print(msg.kludge)
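# The trailing digit written into the CHRS kludge above is its "level" identifier: this
# module emits level 2 for single-byte codepages and level 4 for UTF-8, and drops the
# kludge entirely for the "fido_relics" pseudo-charset (messages that should carry no
# CHRS information at all).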
msg.via = [] # netmail only
for via in ftnheader.findall("VIA"):
for viak, viav in via.attrib.items():
msg.via.append((viak.encode(charset), viav.encode(charset)))
if addvia:
msg.via.append((b"Via", addvia.encode(charset)))
# print(msg.via)
msg.path = []
msg.seenby = set() # echomail only
if destdom=="echo":
dest_zone=ftn.addr.str2addr(echodest)[0]
my_zone=ftn.addr.str2addr(addpath)[0] # addpath should be this node's address
for zpth in ftnheader.findall("ZPTH"):
zpth_record = zpth.get("record")
msg.add_zpth(zpth_record)
if my_zone==dest_zone:
for path in ftnheader.findall("PATH"):
#print(path.get("record"))
msg.add_path(path.get("record"))
if addpath:
#print("additional path", addpath)
pathaddr=ftn.addr.str2addr(addpath)
if not pathaddr[3]:
msg.add_path(addpath)
else:
for path in ftnheader.findall("PATH"):
pathaddr=ftn.addr.str2addr(path.get("record"))
msg.add_zpth(ftn.addr.addr2str((my_zone, pathaddr[1], pathaddr[2], None)))
if addpath:
#print("additional path", addpath)
pathaddr=ftn.addr.str2addr(addpath)
if not pathaddr[3]:
msg.add_zpth(ftn.addr.addr2str((my_zone, pathaddr[1], pathaddr[2], None)))
if my_zone==dest_zone:
for seenby in ftnheader.findall("SEEN-BY"):
seenbyaddr = (seenby.get("zone"), seenby.get("net"), seenby.get("node"), seenby.get("point"))
#print(seenbyaddr)
msg.add_seenby(ftn.addr.addr2str(seenbyaddr))
else:
pass # drop old seen-by's
for seenby in addseenby:
#print("additional seenby", seenby)
sbaddr=ftn.addr.str2addr(seenby)
if sbaddr[0]==dest_zone and not sbaddr[3]:
msg.add_seenby(seenby)
msg.orig=(fname, ftn.addr.str2addr(origaddr))
if destdom=="node":
msg.dest=(tname, ftn.addr.str2addr(destaddr))
msg.area=None
elif destdom=="echo":
#print("packing echomail msg to "+echodest)
msg.dest=(tname, ftn.addr.str2addr(echodest))
msg.area=destaddr.encode(charset)
else:
raise FTNFail("do not know how to pack message to "+destdom)
msg.attr=0
if destdom=="node":
attrs=[]
for attr in ftnheader.findall("ATTR"):
attrs.append(attr.get("id"))
try:
msg.attr = ftn.attr.text_to_binary(attrs)
except:
traceback.print_exc()
msg.cost=0
msg.readcount=0
msg.replyto=0
msg.nextreply=0
#print(msg.__dict__)
return msg, charset
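# The three nearly identical encode blocks above implement a charset fallback chain:
# the message's recorded charset first, then cp437, then utf-8 (which always succeeds).
# A possible refactoring (hedged sketch; nothing below is used by this module) would
# loop over the candidates once and remember which one finally worked:
#
#   for cs in (charset, "cp437", "utf-8"):
#       try:
#           fname = (header.find("sendername").text or '').encode(cs)
#           tname = (header.find("recipientname").text or '').encode(cs)
#           msg.subj = unclean_str(header.find("subject").text or '').encode(cs)
#           msg.date = (header.find("date").text or '').encode(cs)
#           msg.body = body[:nltail].encode(cs).split(b"\n")
#           charset = cs
#           break
#       except UnicodeEncodeError:
#           continue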
# ---
class filecommitter:
def __init__(self, filename):
self.filename = filename
def show(self):
print ("file committer:", self.filename)
def commit(self):
print ("commit file", repr(self.filename))
os.unlink(self.filename)
class netmailcommitter:
def __init__(self, newstatus=2):
self.newstatus=newstatus
self.msglist=set()
self.msgarqlist=[]
self.db = connectdb()
def __del__(self):
self.db.close()
def show(self):
print ("netmail committer:", self.newstatus, self.msglist)
def add(self, d):
if type(d) is netmailcommitter:
if self.msglist.intersection(d.msglist):
raise Exception("double export of netmail message")
self.msglist.update(d.msglist)
self.msgarqlist.extend(d.msgarqlist)
else:
if d[0] in self.msglist:
raise Exception("double export of netmail message")
self.msglist.add(d[0])
if d[1]:
self.msgarqlist.append(d[1])
def commit(self):
try:
with ftnimport.session(self.db) as sess:
for addr, name, deliverto, msg, charset in self.msgarqlist:
print("send audit request to", addr)
sess.send_message(("node", ADDRESS), "Audit tracker", addr, name, None, "Audit tracking response", """
This reply confirms that your message has been successfully delivered
to node %s
*******************************************************************************
%s
*******************************************************************************
"""%(deliverto, msg.as_str(shorten=True).decode(charset)))
except:
print("error sending ARq reply")
traceback.print_exc()
for msg in self.msglist:
self.db.prepare("update messages set processed=$2 where id=$1")(msg,self.newstatus)
print("commit msg #%d"%msg)
self.msglist=set()
self.msgarqlist=[]
class echomailcommitter:
def __init__(self):
self.lasts = {} # subscription: lastsent
self.db = connectdb()
def __del__(self):
self.db.close()
def add_one(self, k, v):
if k in self.lasts and v<=self.lasts[k]:
raise Exception("non-monotonic echomail export")
self.lasts[k] = v
def add(self, d):
if type(d) is echomailcommitter:
for k, v in d.lasts.items():
self.add_one(k, v)
else:
self.add_one(*d)
def commit(self):
for k, v in self.lasts.items():
self.db.prepare("update subscriptions set lastsent=$1 where id=$2")(v, k)
print("commit subscription %d up to message #%d"%(k, v))
self.lasts = {}
def show(self):
print("echomail committer:", self.lasts)
class ticcommitter:
def __init__(self, db):
self.db = db.clone()
self.domain = db.FTN_domains["fileecho"]
self.data = None
def __del__(self):
self.db.close()
def add(self, d):
if self.data:
raise Exception("tic committer allows only one ticfile at time")
self.data = d
def commit(self):
print ("commit tic lastsent %d up to %d"%self.data)
self.db.prepare("update lastsent set lastsent=$3 where subscriber=$1 and domain=$2")(self.data[0], self.domain, self.data[1])
self.data = None
class nullcommitter:
def commit(self):
pass
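# The committer classes above share an informal protocol used by the packers and by
# file_export() below: commit() finalizes a successful export (updates processed flags,
# lastsent watermarks, or removes the sent file), add() (where present) accumulates
# per-message commit data and can also merge another committer of the same type, and
# show() prints the pending state for debugging; nullcommitter is the no-op placeholder.
# See the consumption sketch after file_export() for how commit() is meant to be called.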
# --- file export ---
def get_pkt_n(db, link_id):
if link_id is None:
link_id = db.prepare("select id from links where address is null").first()
with postgresql.alock.ExclusiveLock(db, ((PKTLOCK, link_id))):
r = db.prepare("select pktn from links where id=$1").first(link_id)
db.prepare("update links set pktn=pktn+1 where id=$1")(link_id)
return r
def get_bundle_n(db, link_id):
if link_id is None:
link_id = db.prepare("select id from links where address is null").first()
with postgresql.alock.ExclusiveLock(db, ((BUNDLELOCK, link_id))):
r = db.prepare("select bundlen from links where id=$1").first(link_id)
db.prepare("update links set bundlen=bundlen+1 where id=$1")(link_id)
return r
def get_tic_n(db, link_id):
if link_id is None:
link_id = db.prepare("select id from links where address is null").first()
with postgresql.alock.ExclusiveLock(db, ((TICLOCK, link_id))):
r = db.prepare("select ticn from links where id=$1").first(link_id)
db.prepare("update links set ticn=ticn+1 where id=$1")(link_id)
return r
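# get_pkt_n / get_bundle_n / get_tic_n share one pattern: a per-link PostgreSQL advisory
# lock serializes the read-then-increment of the counter column, so two concurrent
# exporters can never hand out the same packet/bundle/tic sequence number; when link_id
# is None the links row whose address is NULL serves as the shared counter.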
def file_export(db, address, password, what):
""" This generator fetches messages from database and
yields objects, that contain the file information
and instructions how to commit to db inforamtion
about successful message delivery """
# first netmail
# then requested file
# then echoes
# then filebox
# and at last fileechoes
print("export to", repr(address), repr(password), repr(what))
link_id, addr_id, myaddr_id = ftnaccess.check_link(db, address, password, False)
if myaddr_id is None:
raise FTNWrongPassword()
print("password is correct" if link_id else "unprotected session", "local address", myaddr_id)
# WARNING!
# unprotected sessions must never do queries, as that may result in leaking netmail
# if the address of some hub is spoofed
myaddr_text = get_addr(db, myaddr_id)[1]
link_pkt_format, link_bundler = get_link_packing(db, link_id)
link_my_id, link_pw = ftnaccess.link_password(db, link_id, False)
if link_id and ("netmail" in what):
explock = postgresql.alock.ExclusiveLock(db, ((EXPORTLOCK["netmail"], addr_id)))
if explock.acquire(False):
try:
print ("exporting netmail")
# only vital subscriptions are processed
# non-vital ones (CC) should be processed just like echomail
# set password in netmail packets
p = pktpacker(link_pkt_format, myaddr_text, address, link_pw or '', lambda: get_pkt_n(db, link_id), lambda: netmailcommitter())
#..firstly send pkts in outbound
for id_msg, src, dest, msgid, header, body, origcharset, recvfrom in get_subscriber_messages_n(db, addr_id, db.FTN_domains["node"]):
print("netmail %d recvfrom %d pack to %s"%(id_msg, recvfrom, repr(address)))
# if exporting to utf8z always use UTF-8
if link_pkt_format == "utf8z":
origcharset = "utf-8"
myvia = "PyFTN " + ADDRESS + " " + time.asctime()
srca=db.prepare("select domain, text from addresses where id=$1").first(src)
dsta=db.prepare("select domain, text from addresses where id=$1").first(dest)
try:
msg, msgcharset = denormalize_message(
(db.FTN_backdomains[srca[0]], srca[1]),
(db.FTN_backdomains[dsta[0]], dsta[1]),
msgid, header, body, origcharset, address, addvia = myvia)
except:
raise Exception("denormalization error on message id=%d"%id_msg+"\n"+traceback.format_exc())
try:
print ("export msg attributes", msg.attr)
except:
traceback.print_exc()
if 'AuditRequest' in ftn.attr.binary_to_text(msg.attr):
audit_reply = (db.FTN_backdomains[srca[0]], srca[1]), header.find("sendername").text, address, msg, msgcharset
else:
audit_reply = None
for x in p.add_item(msg, (id_msg, audit_reply)): # add ARQ flag
yield x
for x in p.flush():
yield x
del p
finally:
explock.release()
else:
print ("could not acquire netmail lock")
if "direct" in what: # available for unprotected sessions
# export messages with processed==8 and destination==addr_id
explock = postgresql.alock.ExclusiveLock(db, ((EXPORTLOCK["netmail"], addr_id)))
if explock.acquire(False):
print ("exporting direct netmail")
# only vital subscriptions are processed
# non-vital ones (CC) should be processed just like echomail
# set password in netmail packets
p = pktpacker(link_pkt_format, myaddr_text, address, link_pw or '', lambda: get_pkt_n(db, link_id), lambda: netmailcommitter(newstatus=7))
#..firstly send pkts in outbound
for id_msg, src, dest, msgid, header, body, origcharset, recvfrom in get_direct_messages(db, addr_id):
print("direct netmail %d recvfrom %d pack to %s"%(id_msg, recvfrom, repr(address)))
# if exporting to utf8z always use UTF-8
if link_pkt_format == "utf8z":
origcharset = "utf-8"
myvia = "PyFTN " + ADDRESS + " DIRECT " + time.asctime()
srca=db.prepare("select domain, text from addresses where id=$1").first(src)
dsta=db.prepare("select domain, text from addresses where id=$1").first(dest)
try:
msg, msgcharset = denormalize_message(
(db.FTN_backdomains[srca[0]], srca[1]),
(db.FTN_backdomains[dsta[0]], dsta[1]),
msgid, header, body, origcharset, address, addvia = myvia)
except:
raise Exception("denormalization error on message id=%d"%id_msg+"\n"+traceback.format_exc())
try:
print ("export msg attributes", msg.attr)
except:
traceback.print_exc()
if 'AuditRequest' in ftn.attr.binary_to_text(msg.attr):
audit_reply = (db.FTN_backdomains[srca[0]], srca[1]), header.find("sendername").text, address, msg, msgcharset
else:
audit_reply = None
for x in p.add_item(msg, (id_msg, audit_reply)): # add ARQ flag
yield x
for x in p.flush():
yield x
del p
explock.release()
pass
if link_id and ("echomail" in what):
explock = postgresql.alock.ExclusiveLock(db, ((EXPORTLOCK["echomail"], addr_id)))
if explock.acquire(False):
try:
print ("exporting echomail")
#..firstly send bundles in outbound
#
if link_bundler:
p = pktpacker(link_pkt_format, myaddr_text, address, link_pw or '', lambda: get_pkt_n(db, link_id), lambda: echomailcommitter(),
bundlepacker(link_bundler, address, lambda: get_bundle_n(db, link_id), lambda: echomailcommitter()))
else:
p = pktpacker(link_pkt_format, myaddr_text, address, link_pw or '', lambda: get_pkt_n(db, link_id), lambda: echomailcommitter())
subscache = {}
for id_msg, xxsrc, dest, msgid, header, body, origcharset, recvfrom, withsubscr, processed in get_subscriber_messages_e(db, addr_id, db.FTN_domains["echo"]):
# ??? "my" addr in subscription - is it used here
will_export = True # do we really need to send the message, or just update the last_sent pointer?
#print("echomail %d"%id_msg, repr(dest))
#print("dest %d recvfrom %s subscr %s pack to %s"%(dest, repr(recvfrom), repr(withsubscr), address))
# ignore src - for echomail it is just recv_from
# if exporting to utf8z always use UTF-8
if link_pkt_format == "utf8z":
origcharset = "utf-8"
if recvfrom == addr_id:
#print ("Message from this link, will not export")
will_export = False
if processed == 5:
#print ("Archived message, will not export")
will_export = False
# check commuter - NOT TESTED
subscriber_comm = db.FTN_commuter.get(withsubscr)
if subscriber_comm is not None: # must check as None==None => no export at all
# get subscription through what message was received
recvfrom_subscription = db.prepare("select id from subscriptions where target=$1 and subscriber=$2").first(dest, recvfrom)
recvfrom_comm = db.FTN_commuter.get(recvfrom_subscription)
if recvfrom_comm == subscriber_comm:
print("commuter %d - %d, will not export"%(withsubscr, recvfrom_subscription))
will_export = False
# continue # do not forward between subscriptions in one commuter group (e.g. two uplinks)
if dest in subscache:
subscribers = subscache[dest]
else:
subscribers = db.prepare("select a.domain, a.text from subscriptions s, addresses a where s.target=$1 and s.subscriber=a.id")(dest)
if not all([x[0]==db.FTN_domains["node"] for x in subscribers]):
raise FTNFail("subscribers from wrong domain for "+str(sub_targ))
# print(sub_id, sub_targ, "all subscribers:", [x[1] for x in subscribers])
subscribers = subscache[dest] = [x[1] for x in subscribers]
#print("subscribers:", repr(subscribers))
# if withsubscr not in subscribers:
# raise Exception("strange: exporting to non-existent subscription", withsubscr)
dsta = db.prepare("select domain, text from addresses where id=$1").first(dest)
# modify path and seen-by
# seen-by's - get list of all subscribers of this target; add subscribers list
#... if go to another zone remove path and seen-by's and only add seen-by's of that zone -> ftnexport
if will_export: # create MSG else do not bother
try:
msg, msgcharset = denormalize_message(
("node", ADDRESS),
(db.FTN_backdomains[dsta[0]], dsta[1]),
msgid, header, body, origcharset, address, addseenby=subscribers, addpath=ADDRESS)
except:
raise Exception("denormalization error on message id=%d"%id_msg+"\n"+traceback.format_exc())
for x in p.add_item((msg if will_export else None), (withsubscr, id_msg)):
yield x
for x in p.flush():
yield x
finally:
explock.release()
else:
print("could not acquire echomail lock")
if link_id and ("filebox" in what):
explock = postgresql.alock.ExclusiveLock(db, ((EXPORTLOCK["filebox"], addr_id)))
if explock.acquire(False):
# ..send freq filebox
print ("exporting filebox")
dsend = addrdir(DOUTBOUND, address)
if os.path.isdir(dsend):
print ("exporting daemon outbound")
for f in os.listdir(dsend):
fname = os.path.join(dsend, f)
if os.path.isfile(fname):
obj = outfile()
obj.data = open(fname, "rb")
obj.filename = f
obj.length = os.path.getsize(fname)
yield obj, filecommitter(fname)
explock.release()
if link_id and ("fileecho" in what):
explock = postgresql.alock.ExclusiveLock(db, ((EXPORTLOCK["fileecho"], addr_id)))
if explock.acquire(False):
try:
print ("exporting fileechoes for", address)
subscache = {}
tic_password = password
t = ticpacker(lambda: get_tic_n(db, link_id), lambda: ticcommitter(db))
latest_post = db.prepare("select max(post_time) from file_post").first()
fruitful = False
for fp_id, fp_filename, fp_destination, fp_recv_from, fp_recv_as, fp_post_time, fp_filedata, \
fp_origin, fp_other, withsubscr, file_length, file_content, file_lo in \
db.prepare("select fp.id, fp.filename, fp.destination, fp.recv_from, fp.recv_as, fp.post_time, "
"fp.filedata, fp.origin, fp.other, s.id, f.length, f.content, f.lo "
"from file_post fp, subscriptions s, files f "
"where exists(select * from subscriptions ss where ss.target=fp.destination and ss.subscriber=$1) and s.subscriber=$1 and "
"fp.post_time>(select lastsent from lastsent where subscriber=$1) and fp.destination=s.target and f.id=fp.filedata "
"order by fp.post_time")(addr_id):
print (fp_id, fp_filename, fp_destination)
will_export = True # do we really need to send the message, or just update the last_sent pointer?
if fp_recv_from == addr_id:
#print ("Message from this link, will not export")
will_export = False
other = json.loads(fp_other)
del fp_other
seenby = set(other.get("SEENBY", []))
if address in seenby:
print ("already in seenby list, not sending")
will_export = False
# dupe (own address) in path should be checked at import
# check commuter - NOT TESTED
subscriber_comm = db.FTN_commuter.get(withsubscr)
if subscriber_comm is not None: # must check as None==None => no export at all
# get subscription through what message was received
recvfrom_subscription = db.prepare("select id from subscriptions where target=$1 and subscriber=$2").first(fp_destination, fp_recv_from)
recvfrom_comm = db.FTN_commuter.get(recvfrom_subscription)
if recvfrom_comm == subscriber_comm:
print("commuter %d - %d, will not export"%(withsubscr, recvfrom_subscription))
will_export = False
# continue # do not forward between subscriptions in one commuter group (e.g. two uplinks)
if fp_destination in subscache:
subscribers = subscache[fp_destination]
else:
subscribers = db.prepare("select a.domain, a.text from subscriptions s, addresses a where s.target=$1 and s.subscriber=a.id")(fp_destination)
if not all([x[0]==db.FTN_domains["node"] for x in subscribers]):
raise FTNFail("subscribers from wrong domain for "+str(sub_targ))
# print(sub_id, sub_targ, "all subscribers:", [x[1] for x in subscribers])
subscribers = subscache[fp_destination] = [x[1] for x in subscribers]
dsta = get_addr(db, fp_destination)
# modify path and seen-by
# seen-by's - get list of all subscribers of this target; add subscribers list
if will_export:
fruitful = True # something was exported
#print("add seen-by", subscribers, " add path", myaddr_text)
other.setdefault("PATH", []).append(myaddr_text+" "+str(fp_post_time)+" "+time.asctime(time.gmtime(fp_post_time))+" PyFTN")
seenby.update(subscribers)
other["SEENBY"]=list(seenby)
if "CRC" in other:
file_crc32 = other.pop("CRC")[0]
print ("filename %s length %d crc %s"%(fp_filename, file_length, file_crc32))
else:
if file_content:
file_crc32 = ftntic.sz_crc32s(file_content)[1]
else:
file_crc32 = ftntic.sz_crc32fd(lo.LOIOReader(db, file_lo))[1]
#if fp_origin is None:
# print("substitute empty origin with source")
# fp_origin = fp_recv_from
tic = ftntic.make_tic(myaddr_text, address, tic_password, dsta[1], get_addr(db, fp_origin)[1] if fp_origin else None,
fp_filename, file_length, file_crc32, other)
# this code was migrated from the echo export, so it takes care to update
# per-subscription last-sents even for blocked (not exported) items
for x in t.add_item(((tic, (fp_filename, file_length, file_content or (db, file_lo))) if will_export else None), (addr_id, fp_post_time)): # ordering by post_time
yield x
if not fruitful:
print("empty export, must update per-subscriber lastsent up to", latest_post)
for x in t.flush():
yield x
finally:
explock.release()
else:
print ("cannot acquire fileecho lock for", addr_id)
return
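# A hedged sketch of how a transport (e.g. a binkp session) is expected to consume the
# generator above: every yielded pair is (file object or None, committer), and the
# committer must be committed only after the file has actually been handed over.
# send_file() and the example address below are placeholders, not part of this module:
#
#   for obj, committer in file_export(db, "2:9999/1", password, ("netmail", "echomail")):
#       if obj is not None:
#           obj.show()
#           send_file(obj.filename, obj.data, obj.length)
#       committer.commit()   # update processed flags / lastsent, or delete the sent file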
class outfile:
def commit(self):
self.commitdb()
# filename
# data
# length
def show(self):
print ("exported file",self.filename,self.length)
class pktpacker:
def __init__(self, format, me, node, passw, counter, commitgen, packto=None):
self.format = format
self.packet = None
self.node = node
self.me = me
self.packto = packto
self.counter = counter
self.commitgen = commitgen
self.passw = passw.encode("utf-8")[:8]
self.committer = None
def add_item(self, m, commitdata):
if m is not None:
if self.packet is None:
self.packet=ftn.pkt.PKT()
self.packet.password=self.passw
self.packet.source=ftn.addr.str2addr(self.me)
self.packet.destination=ftn.addr.str2addr(self.node)
self.packet.date=time.localtime()
self.packet.msg=[]
self.packet.approxlen=0
self.start_time = get_time()
self.packet.msg.append(m)
self.packet.approxlen+=len(m.pack()) # double packing :(
if self.committer is None:
self.committer = self.commitgen()
# if m is not None it will be sent, otherwise only last_sent will be updated
self.committer.add(commitdata)
if self.packet and (self.packet.approxlen>PACKETTHRESHOLD or get_time()-self.start_time>PACKETTIMELIMIT):
for x in self.pack():
yield x
def pack(self):
if self.packet is not None:
p = outfile()
if self.format=='utf8z':
p.filename = "%08x.upkt"%self.counter()
else:
p.filename = "%08x.pkt"%self.counter()
print("PACKET %s"%p.filename)
p.data = io.BytesIO()
self.packet.save(p.data, format=self.format)
p.length = p.data.tell()
p.data.seek(0)
self.packet = None
else:
print("PACKET = None")
print("Committer:")
print(self.committer.show() if self.committer else "None")
p = None
if p or self.committer:
if self.packto:
for x in self.packto.add_item(p, self.committer):
yield x
else:
yield p, self.committer
self.committer = None
def flush(self):
for x in self.pack():
yield x
if self.packto:
for x in self.packto.flush():
yield x
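# pktpacker can be chained into a bundlepacker via the packto argument: finished .pkt
# files are then not yielded directly but stored into a ZIP bundle, and the per-packet
# committers are merged into the bundle's committer (the committer add() methods above
# accept another committer of the same type for exactly that purpose). A minimal sketch
# of the chain, mirroring the echomail branch of file_export (counters and addresses
# are placeholders):
#
#   p = pktpacker(pkt_format, my_addr, their_addr, pkt_pw, pkt_counter, echomailcommitter,
#                 bundlepacker(bundler, their_addr, bundle_counter, echomailcommitter))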
class bundlepacker:
def __init__(self, bundler, destination, counter, commitgen, packto=None):
self.bundler = bundler
self.destination = destination
self.counter = counter
self.bundle = None
self.commitgen = commitgen
self.packto = packto
self.committer = None
def add_item(self, p, commitdata):
if p is not None:
if self.bundle is None:
self.start_time = get_time()
fo = io.BytesIO()
self.bundle = (fo, zipfile.ZipFile(fo, "w", zipfile.ZIP_DEFLATED))
self.bundle[1].writestr(p.filename, p.data.read())