Operator.py
import collections.abc as collections  # alias so collections.Sequence refers to the ABC

import torch


class Operator:
    """Single-site operator; products of operators are tuples, sums are lists of tuples."""

    def __init__(self, lat_site):
        self.lat_site = lat_site
        self.switching_map = 1                  # +1: diagonal in the spin basis, -1: spin-flipping
        self.mat_els = torch.tensor([1., 1.])   # matrix elements for the two basis states
        self.name = 'not yet named'

    def __str__(self):
        return self.name + f"_{self.lat_site}"

    # Convention: multiplication of operators yields a tuple, a sum of operators yields a list.
    def __mul__(self, other):
        if isinstance(other, (float, int)):
            # Scalar prefactor: rescale the matrix elements in place.
            self.mat_els *= other
            return self
        elif isinstance(other, Operator):
            if other.lat_site == self.lat_site:
                print('multiplication of operators on the same lattice site not yet implemented')
                return self
            else:
                return [(self, other)]
        else:
            print('multiplication for this type not yet implemented')
            return self

    def __rmul__(self, other):
        return self.__mul__(other)

    def __add__(self, other):
        if isinstance(other, Operator):
            return [(self,), (other,)]
        elif isinstance(other, collections.Sequence):
            if type(other) == list:
                if len(other) == 0 or isinstance(other[0][0], Operator):
                    return [(self,)] + other
                else:
                    print("can't add to sequence, wrong type")
            elif type(other) == tuple:
                if isinstance(other[0], Operator):
                    return [(self,), other]
                else:
                    print("can't add to sequence, wrong type")
        else:
            print('add for this type not yet implemented')
        return self

    def __radd__(self, other):
        return self.__add__(other)
class Sx(Operator):
    def __init__(self, lat_site):
        super().__init__(lat_site)
        self.switching_map = -1
        self.mat_els = torch.tensor([1., 1.], dtype=torch.complex64)
        self.name = r'\sigma^x'


class Sy(Operator):
    def __init__(self, lat_site):
        super().__init__(lat_site)
        self.switching_map = -1
        self.mat_els = torch.tensor([1.j, -1.j], dtype=torch.complex64)
        self.name = r'\sigma^y'


class Sz(Operator):
    def __init__(self, lat_site):
        super().__init__(lat_site)
        self.switching_map = 1
        self.mat_els = torch.tensor([1., -1.], dtype=torch.complex64)
        self.name = r'\sigma^z'
def print_op_list(op_list):
    """Print a list of operator tuples as a sum of products."""
    out_str = "Hamiltonian = "
    for j, op_tuple in enumerate(op_list):
        if j > 0:
            out_str += " + "
        for i, op in enumerate(op_tuple):
            out_str += str(op)
            if i < len(op_tuple) - 1:
                out_str += " * "
    print(out_str)
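
The classes above compose directly through the tuple/list convention. Below is a minimal usage sketch, not part of Operator.py itself; it assumes the file is importable as the module `Operator` and that PyTorch is installed, and the two-site term list is just an illustrative choice.

# Minimal usage sketch (assumption: Operator.py is importable as `Operator`).
from Operator import Sx, Sz, print_op_list

zz_term = Sz(0) * Sz(1)      # product of operators on different sites -> [(Sz_0, Sz_1)]
H = zz_term + 2 * Sx(0)      # scalar rescales mat_els in place; the sum gives a list of tuples

# Prints e.g. "Hamiltonian = \sigma^x_0 + \sigma^z_0 * \sigma^z_1"
# (the __radd__ path prepends the new single-operator term to the existing list).
print_op_list(H)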