
Commit

Correct formula for Issue #57
ccrock4t committed Nov 19, 2023
1 parent bcda926 commit a4c6c09
Showing 6 changed files with 17 additions and 3 deletions.
2 changes: 1 addition & 1 deletion pynars/Config.py
@@ -17,7 +17,7 @@ class Enable:
     debug = False

 class Config:
-    r_top_level_attention = 0.5
+    r_top_level_attention_adjust = 0.5

     priority: float=0.8
     durability: float=0.8
10 changes: 9 additions & 1 deletion pynars/NARS/Control/Reasoner.py
@@ -42,6 +42,8 @@ def __init__(self, n_memory, capacity, config = './config.json', nal_rules={1,2,
         self.sequence_buffer = Buffer(capacity)
         self.operations_buffer = Buffer(capacity)

+        self.u_top_level_attention = 0.5
+
     def reset(self):
         ''''''
         # TODO
@@ -71,10 +73,16 @@ def cycle(self):

         random_number: float = random.random()

-        if random_number < Config.Config.r_top_level_attention:
+        data_structure_accessed_busyness = None
+        if random_number < self.u_top_level_attention:
             judgement_revised, goal_revised, answers_question, answers_quest = self.observe(tasks_derived)
+            data_structure_accessed_busyness = self.overall_experience.busyness
         else:
             self.consider(tasks_derived)
+            data_structure_accessed_busyness = self.memory.busyness

+        self.u_top_level_attention = Config.Config.r_top_level_attention_adjust * data_structure_accessed_busyness \
+            + (1 - Config.Config.r_top_level_attention_adjust) * self.u_top_level_attention
+
         # temporal induction in NAL-7
         if Enable.temporal_reasoning and task is not None and task.is_judgement and task.is_external_event:
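For context (not part of the commit), here is a minimal standalone sketch of the corrected update rule: the attention level u is an exponential moving average of the busyness of whichever data structure was accessed this cycle, with r_top_level_attention_adjust as the smoothing rate. The simulate_cycles helper and the busyness values 0.8 and 0.3 are made up for illustration; only the 0.5 defaults and the moving-average formula mirror the diff above.

import random

R_TOP_LEVEL_ATTENTION_ADJUST = 0.5  # mirrors Config.r_top_level_attention_adjust


def simulate_cycles(n_cycles: int = 10, seed: int = 0) -> float:
    """Simulate the top-level attention update over a few cycles with made-up busyness values."""
    rng = random.Random(seed)
    u_top_level_attention = 0.5          # initial value, as in Reasoner.__init__
    for _ in range(n_cycles):
        if rng.random() < u_top_level_attention:
            busyness = 0.8               # hypothetical busyness of the overall experience buffer
        else:
            busyness = 0.3               # hypothetical busyness of memory
        # Corrected formula: blend the accessed structure's busyness into the running average.
        u_top_level_attention = (R_TOP_LEVEL_ATTENTION_ADJUST * busyness
                                 + (1 - R_TOP_LEVEL_ATTENTION_ADJUST) * u_top_level_attention)
    return u_top_level_attention


print(simulate_cycles())

With r_top_level_attention_adjust = 0.5, each cycle moves u halfway toward the busyness of the structure that was just consulted, so a consistently busy overall experience buffer raises the probability of observing it on the next cycle.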
1 change: 1 addition & 0 deletions pynars/NARS/DataStructures/_py/Bag.py
@@ -58,6 +58,7 @@ def map_priority(priority: float):
             return idx if idx < self.n_levels else self.n_levels - 1

         self.map_priority = map_priority
+        self.busyness = 0.5

     def take(self, remove = True) -> Item:
         if len(self) == 0: return None
2 changes: 2 additions & 0 deletions pynars/NARS/DataStructures/_py/Buffer.py
@@ -22,10 +22,12 @@ class Buffer(Bag):
     existing concepts in the memory or tasks in the buffer.
     '''

+
     def __init__(self, capacity: int, n_buckets: int=None, take_in_order: bool=False, max_duration: int=None) -> None:
         key: Callable[[Task], Any] = lambda task: (hash(task), hash(task.stamp.evidential_base))
         Bag.__init__(self, capacity, n_buckets=n_buckets, take_in_order=take_in_order, key=key)
         self.max_duration = max_duration if max_duration is not None else Config.max_duration
+        self.busyness = 0.5

     # def put(self, task: Task):
     #     return Bag.put(self, task, (hash(task), hash(task.stamp.evidential_base)))
4 changes: 4 additions & 0 deletions pynars/NARS/DataStructures/_py/Memory.py
@@ -25,6 +25,10 @@ def __init__(self, capacity: int, n_buckets: int = None, take_in_order: bool = F
         self.concepts = Bag(capacity, n_buckets=n_buckets, take_in_order=take_in_order)
         self.output_buffer = output_buffer

+    @property
+    def busyness(self):
+        return self.concepts.busyness
+
     def accept(self, task: Task):
         '''
         **Accept task**: Accept a task from the `Overall Experience`, and link it from all directly related concepts. Ref: *The Conceptual Design of OpenNARS 3.1.0*.
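For context (not part of the commit), a minimal sketch of the delegation pattern introduced here, using simplified stand-in classes (FakeBag and FakeMemory are hypothetical, not the real PyNARS types): Memory does not track its own busyness; it exposes the busyness of its concept Bag through a read-only property, which is what Reasoner.cycle reads as self.memory.busyness.

class FakeBag:
    def __init__(self):
        self.busyness = 0.5      # same default the commit adds to Bag.__init__


class FakeMemory:
    def __init__(self):
        self.concepts = FakeBag()

    @property
    def busyness(self):
        return self.concepts.busyness


memory = FakeMemory()
memory.concepts.busyness = 0.9   # whatever updates the Bag is reflected by Memory
print(memory.busyness)           # 0.9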
1 change: 0 additions & 1 deletion pynars/Narsese/_py/Sentence.py
@@ -11,7 +11,6 @@

 from pynars.Config import Config, Enable
 from pynars import Global
-from ...NAL.Functions import F_expectation


 class Punctuation(Enum):
