Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • android_compatible_0.14
  • ipaaca-json
  • ipaaca-rsb
  • ipaaca3-dev
  • ipaaca4
  • ipaaca4py3
  • kompass
  • legacy-str
  • master
  • mqtt_port
  • rsb0.11
  • rsb0.14
  • ryt-fullport
  • softwareweek2019
  • windows-compatibility
15 results

Target

Select target project
  • scs/ipaaca
  • ramin.yaghoubzadeh/ipaaca
2 results
Select Git revision
  • ipaaca-rsb
  • ipaaca3-dev
  • ipaaca4
  • kompass
  • master
  • rsb0.14
6 results
Show changes
Showing
with 4435 additions and 1394 deletions
#!/usr/bin/env python
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Sociable Agents Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
import time
import ipaaca
def remote_change_dumper(iu, event_type, local):
	"""Debug event handler: print every IU event this buffer receives.

	Keyword arguments:
	iu -- the IU the event refers to
	event_type -- one of the IUEventType constants (e.g. 'ADDED')
	local -- flag passed by the buffer indicating IU locality
	"""
	# Fix: the diff artifact left both the Python-2 print statement and the
	# Python-3 print() call in place; the statement form is a SyntaxError
	# under Python 3, so only the function-call form is kept.
	if local:
		# NOTE(review): the message says 'remote side' but only fires when
		# ``local`` is true -- confirm the intended polarity with the author.
		print('remote side '+event_type+': '+str(iu))
ob = ipaaca.OutputBuffer('CoolInformerOut')
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import logging
import sys
import threading
import uuid
import collections
import copy
import rsb
import rsb.converter
import ipaaca_pb2
OMIT_REVISION_CHECKS = False
# IDEAS
# We should think about relaying the update event (or at least the
# affected keys in the payload / links) to the event handlers!
# THOUGHTS
# Output buffers could generate UIDs for IUs on request, without
# publishing them at that time. Then UID could then be used
# for internal links etc. The IU may be published later through
# the same buffer that allocated the UID.
# WARNINGS
# category is now the FIRST argument for IU constructors
# Public API of this module, as exported via ``from ... import *``.
__all__ = [
	'IUEventType',
	'IUAccessMode',
	'InputBuffer', 'OutputBuffer',
	'IU',
	'IUPublishedError', 'IUUpdateFailedError', 'IUCommittedError', 'IUReadOnlyError', 'IUNotFoundError',
	'logger'
]
## --- Utilities -------------------------------------------------------------
def enum(*sequential, **named):
	"""Create an enum type.
	Based on suggestion of Alec Thomas on stackoverflow.com:
	http://stackoverflow.com/questions/36932/
	whats-the-best-way-to-implement-an-enum-in-python/1695250#1695250
	"""
	# Positional names get ascending integer values; keyword names keep
	# their explicitly supplied values.
	members = {}
	for index, member_name in enumerate(sequential):
		members[member_name] = index
	members.update(named)
	return type('Enum', (), members)
def pack_typed_payload_item(protobuf_object, key, value):
	"""Copy one payload (key, value) pair into a protobuf payload entry."""
	# Only the 'str' payload type is supported so far.  TODO: more types
	protobuf_object.type = 'str'
	protobuf_object.key = key
	protobuf_object.value = value
def unpack_typed_payload_item(protobuf_object):
	"""Return the (key, value) pair stored in a protobuf payload entry."""
	# TODO: more types -- the entry's declared type is currently ignored.
	key = protobuf_object.key
	value = protobuf_object.value
	return (key, value)
class IpaacaLoggingHandler(logging.Handler):
	"""Logging handler that prints ipaaca log records to stdout."""
	def __init__(self, level=logging.DEBUG):
		logging.Handler.__init__(self, level)
	def emit(self, record):
		# Format '[ipaaca] (<LEVEL>) <message>'; the record's msg template is
		# expanded with str.format against the args tuple, as elsewhere here.
		text = str(record.msg.format(record.args))
		print('[ipaaca] (' + str(record.levelname) + ') ' + text)
## --- Global Definitions ----------------------------------------------------
# All IU event types that buffer event handlers can be registered for.
IUEventType = enum(
	ADDED = 'ADDED',
	COMMITTED = 'COMMITTED',
	DELETED = 'DELETED',
	RETRACTED = 'RETRACTED',
	UPDATED = 'UPDATED',
	LINKSUPDATED = 'LINKSUPDATED',
	MESSAGE = 'MESSAGE'
)
# Possible IU access modes (values are ascending ints, see enum()).
IUAccessMode = enum(
	"PUSH",
	"REMOTE",
	"MESSAGE"
)
## --- Errors and Exceptions -------------------------------------------------
class IUPublishedError(Exception):
	"""Error publishing of an IU failed since it is already in the buffer."""
	def __init__(self, iu):
		message = 'IU {0} is already present in the output buffer.'.format(iu.uid)
		super(IUPublishedError, self).__init__(message)
class IUUpdateFailedError(Exception):
	"""Error indicating that a remote IU update failed."""
	def __init__(self, iu):
		message = 'Remote update failed for IU {0}.'.format(iu.uid)
		super(IUUpdateFailedError, self).__init__(message)
class IUCommittedError(Exception):
	"""Error indicating that an IU is immutable because it has been committed to."""
	def __init__(self, iu):
		message = 'Writing to IU {0} failed -- it has been committed to.'.format(iu.uid)
		super(IUCommittedError, self).__init__(message)
class IUReadOnlyError(Exception):
	"""Error indicating that an IU is immutable because it is 'read only'."""
	def __init__(self, iu):
		message = 'Writing to IU {0} failed -- it is read-only.'.format(iu.uid)
		super(IUReadOnlyError, self).__init__(message)
class IUNotFoundError(Exception):
	"""Error indicating that an IU UID was unexpectedly not found in an internal store."""
	def __init__(self, iu_uid):
		message = 'Lookup of IU {0} failed.'.format(iu_uid)
		super(IUNotFoundError, self).__init__(message)
## --- Generation Architecture -----------------------------------------------
class Payload(dict):
	"""Payload of an IU: a dict whose mutations are relayed to the owning IU.

	NOTE: this code targets Python 2 -- it uses the ``unicode`` builtin to
	normalise byte strings ('str') to unicode with utf8 decoding.
	"""
	def __init__(self, iu, writer_name=None, new_payload=None, omit_init_update_message=False):
		# Normalise all initial keys/values to unicode before storing.
		pl1 = {} if new_payload is None else new_payload
		pl = {}
		for k,v in pl1.items():
			if type(k)==str:
				k=unicode(k,'utf8')
			if type(v)==str:
				v=unicode(v,'utf8')
			pl[k] = v
		self.iu = iu
		# NOTE omit_init_update_message is necessary to prevent checking for
		# exceptions and sending updates in the case where we just receive
		# a whole new payload from the remote side and overwrite it locally.
		if (not omit_init_update_message) and (self.iu.buffer is not None):
			self.iu._modify_payload(is_delta=False, new_items=pl, keys_to_remove=[], writer_name=writer_name)
		for k, v in pl.items():
			dict.__setitem__(self, k, v)
	def __setitem__(self, k, v, writer_name=None):
		# Notify the IU FIRST: it may raise (committed/read-only IU) before
		# the local dict is modified, leaving the payload untouched.
		if type(k)==str:
			k=unicode(k,'utf8')
		if type(v)==str:
			v=unicode(v,'utf8')
		self.iu._modify_payload(is_delta=True, new_items={k:v}, keys_to_remove=[], writer_name=writer_name)
		result = dict.__setitem__(self, k, v)
	def __delitem__(self, k, writer_name=None):
		# Same notify-then-mutate ordering as __setitem__.
		if type(k)==str:
			k=unicode(k,'utf8')
		self.iu._modify_payload(is_delta=True, new_items={}, keys_to_remove=[k], writer_name=writer_name)
		result = dict.__delitem__(self, k)
	def _remotely_enforced_setitem(self, k, v):
		"""Sets an item when requested remotely."""
		# Bypasses _modify_payload on purpose: no further update is sent.
		return dict.__setitem__(self, k, v)
	def _remotely_enforced_delitem(self, k):
		"""Deletes an item when requested remotely."""
		# Bypasses _modify_payload on purpose: no further update is sent.
		return dict.__delitem__(self, k)
class IUInterface(object): #{{{
	"""Base class of all specialised IU classes."""
	def __init__(self, uid, access_mode=IUAccessMode.PUSH, read_only=False):
		"""Creates an IU.
		Keyword arguments:
		uid -- unique ID of this IU
		access_mode -- access mode of this IU
		read_only -- flag indicating whether this IU is read_only or not
		"""
		self._uid = uid
		self._revision = None
		self._category = None
		self._payload_type = None
		self._owner_name = None
		self._committed = False
		self._retracted = False
		self._access_mode = access_mode
		self._read_only = read_only
		self._buffer = None
		# payload is not present here
		self._links = collections.defaultdict(set)
	def __str__(self):
		# Human-readable dump of uid, buffer, owner, payload and links.
		# NOTE: uses the Python-2-only ``unicode`` builtin.
		s = unicode(self.__class__)+"{ "
		s += "uid="+self._uid+" "
		s += "(buffer="+(self.buffer.unique_name if self.buffer is not None else "<None>")+") "
		s += "owner_name=" + ("<None>" if self.owner_name is None else self.owner_name) + " "
		s += "payload={ "
		for k,v in self.payload.items():
			s += k+":'"+v+"', "
		s += "} "
		s += "links={ "
		for t,ids in self.get_all_links().items():
			s += t+":'"+str(ids)+"', "
		s += "} "
		s += "}"
		return s
	def _add_and_remove_links(self, add, remove):
		'''Just add and remove the new links in our links set, do not send an update here'''
		'''Note: Also used for remotely enforced links updates.'''
		# Removal happens before addition; 'type' here shadows the builtin.
		for type in remove.keys(): self._links[type] -= set(remove[type])
		for type in add.keys(): self._links[type] |= set(add[type])
	def _replace_links(self, links):
		'''Just wipe and replace our links set, do not send an update here'''
		'''Note: Also used for remotely enforced links updates.'''
		self._links = collections.defaultdict(set)
		for type in links.keys(): self._links[type] |= set(links[type])
	def add_links(self, type, targets, writer_name=None):
		'''Attempt to add links if the conditions are met
		and send an update message. Then call the local setter.'''
		# Wrap a single target in a list (Py2 str has no __iter__).
		if not hasattr(targets, '__iter__'): targets=[targets]
		self._modify_links(is_delta=True, new_links={type:targets}, links_to_remove={}, writer_name=writer_name)
		self._add_and_remove_links( add={type:targets}, remove={} )
	def remove_links(self, type, targets, writer_name=None):
		'''Attempt to remove links if the conditions are met
		and send an update message. Then call the local setter.'''
		if not hasattr(targets, '__iter__'): targets=[targets]
		self._modify_links(is_delta=True, new_links={}, links_to_remove={type:targets}, writer_name=writer_name)
		self._add_and_remove_links( add={}, remove={type:targets} )
	def modify_links(self, add, remove, writer_name=None):
		'''Attempt to modify links if the conditions are met
		and send an update message. Then call the local setter.'''
		self._modify_links(is_delta=True, new_links=add, links_to_remove=remove, writer_name=writer_name)
		self._add_and_remove_links( add=add, remove=remove )
	def set_links(self, links, writer_name=None):
		'''Attempt to set (replace) links if the conditions are met
		and send an update message. Then call the local setter.'''
		self._modify_links(is_delta=False, new_links=links, links_to_remove={}, writer_name=writer_name)
		self._replace_links( links=links )
	def get_links(self, type):
		# Returns a defensive copy of the target set for this link type.
		return set(self._links[type])
	def get_all_links(self):
		# Deep copy so callers cannot mutate the internal link structure.
		return copy.deepcopy(self._links)
	def _get_revision(self):
		return self._revision
	revision = property(fget=_get_revision, doc='Revision number of the IU.')
	def _get_category(self):
		return self._category
	category = property(fget=_get_category, doc='Category of the IU.')
	def _get_payload_type(self):
		return self._payload_type
	payload_type = property(fget=_get_payload_type, doc='Type of the IU payload')
	def _get_committed(self):
		return self._committed
	committed = property(
			fget=_get_committed,
			doc='Flag indicating whether this IU has been committed to.')
	def _get_retracted(self):
		return self._retracted
	retracted = property(
			fget=_get_retracted,
			doc='Flag indicating whether this IU has been retracted.')
	def _get_uid(self):
		return self._uid
	uid = property(fget=_get_uid, doc='Unique ID of the IU.')
	def _get_access_mode(self):
		return self._access_mode
	access_mode = property(fget=_get_access_mode, doc='Access mode of the IU.')
	def _get_read_only(self):
		return self._read_only
	read_only = property(
			fget=_get_read_only,
			doc='Flag indicating whether this IU is read only.')
	def _get_buffer(self):
		return self._buffer
	def _set_buffer(self, buffer):
		# An IU may be assigned to a buffer exactly once.
		if self._buffer is not None:
			raise Exception('The IU is already in a buffer, cannot move it.')
		self._buffer = buffer
	buffer = property(
			fget=_get_buffer,
			fset=_set_buffer,
			doc='Buffer this IU is held in.')
	def _get_owner_name(self):
		return self._owner_name
	def _set_owner_name(self, owner_name):
		# The owner name, too, is write-once.
		if self._owner_name is not None:
			raise Exception('The IU already has an owner name, cannot change it.')
		self._owner_name = owner_name
	owner_name = property(
			fget=_get_owner_name,
			fset=_set_owner_name,
			doc="The IU's owner's name.")
#}}}
class IU(IUInterface):#{{{
	"""A local IU."""
	def __init__(self, category='undef', access_mode=IUAccessMode.PUSH, read_only=False, _payload_type='MAP'):
		super(IU, self).__init__(uid=None, access_mode=access_mode, read_only=read_only)
		self._revision = 1
		# A fresh UID is allocated immediately (write-once, see uid property).
		self.uid = str(uuid.uuid4())
		self._category = category
		self._payload_type = _payload_type
		# Serialises revision increments and outgoing updates.
		self.revision_lock = threading.RLock()
		self._payload = Payload(iu=self)
	def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
		# Bump the revision and, once published, relay the link change.
		# NOTE(review): mutable default arguments are shared across calls.
		if self.committed:
			raise IUCommittedError(self)
		with self.revision_lock:
			# modify links locally
			self._increase_revision_number()
			if self.is_published:
				# send update to remote holders
				self.buffer._send_iu_link_update(
						self,
						revision=self.revision,
						is_delta=is_delta,
						new_links=new_links,
						links_to_remove=links_to_remove,
						writer_name=self.owner_name if writer_name is None else writer_name)
	def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
		"""Modify the payload: add or remove items from this payload locally and send update."""
		if self.committed:
			raise IUCommittedError(self)
		with self.revision_lock:
			# set item locally
			# FIXME: Is it actually set locally?
			self._increase_revision_number()
			if self.is_published:
				# send update to remote holders
				self.buffer._send_iu_payload_update(
						self,
						revision=self.revision,
						is_delta=is_delta,
						new_items=new_items,
						keys_to_remove=keys_to_remove,
						writer_name=self.owner_name if writer_name is None else writer_name)
	def _increase_revision_number(self):
		# Callers are expected to hold revision_lock.
		self._revision += 1
	def _internal_commit(self, writer_name=None):
		# Committing is idempotent-with-error: a second commit raises.
		if self.committed:
			raise IUCommittedError(self)
		with self.revision_lock:
			if not self._committed:
				self._increase_revision_number()
				self._committed = True
				if self.buffer is not None:
					self.buffer._send_iu_commission(self, writer_name=writer_name)
	def commit(self):
		"""Commit to this IU."""
		return self._internal_commit()
	def _get_payload(self):
		return self._payload
	def _set_payload(self, new_pl, writer_name=None):
		# Replaces the whole payload; the new Payload() sends the update.
		if self.committed:
			raise IUCommittedError(self)
		with self.revision_lock:
			self._increase_revision_number()
			self._payload = Payload(
					iu=self,
					writer_name=None if self.buffer is None else (self.buffer.unique_name if writer_name is None else writer_name),
					new_payload=new_pl)
	payload = property(
			fget=_get_payload,
			fset=_set_payload,
			doc='Payload dictionary of this IU.')
	def _get_is_published(self):
		# Published means: the IU has been placed into a buffer.
		return self.buffer is not None
	is_published = property(
			fget=_get_is_published,
			doc='Flag indicating whether this IU has been published or not.')
	def _set_buffer(self, buffer):
		# Placing the IU into a buffer also fixes its owner name.
		if self._buffer is not None:
			raise Exception('The IU is already in a buffer, cannot move it.')
		self._buffer = buffer
		self.owner_name = buffer.unique_name
		self._payload.owner_name = buffer.unique_name
	buffer = property(
			fget=IUInterface._get_buffer,
			fset=_set_buffer,
			doc='Buffer this IU is held in.')
	def _set_uid(self, uid):
		# The uid is write-once.
		if self._uid is not None:
			raise AttributeError('The uid of IU ' + self.uid + ' has already been set, cannot change it.')
		self._uid = uid
	uid = property(
			fget=IUInterface._get_uid,
			fset=_set_uid,
			doc='Unique ID of the IU.')
#}}}
class Message(IU):#{{{
	"""Local IU of Message sub-type. Can be handled like a normal IU, but on the remote side it is only existent during the handler calls."""
	def __init__(self, category='undef', access_mode=IUAccessMode.MESSAGE, read_only=True, _payload_type='MAP'):
		super(Message, self).__init__(category=category, access_mode=access_mode, read_only=read_only, _payload_type=_payload_type)
	def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
		# Messages are fire-and-forget: once sent, changes are local only.
		if self.is_published:
			logger.info('Info: modifying a Message after sending has no global effects')
	def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
		# See _modify_links: no update is relayed after sending.
		if self.is_published:
			logger.info('Info: modifying a Message after sending has no global effects')
	def _increase_revision_number(self):
		self._revision += 1
	def _internal_commit(self, writer_name=None):
		# Commits on sent Messages are ignored (logged only).
		if self.is_published:
			logger.info('Info: committing to a Message after sending has no global effects')
	def commit(self):
		return self._internal_commit()
	def _get_payload(self):
		return self._payload
	def _set_payload(self, new_pl, writer_name=None):
		# Before sending, behaves like IU._set_payload; afterwards, no-op + log.
		if self.is_published:
			logger.info('Info: modifying a Message after sending has no global effects')
		else:
			if self.committed:
				raise IUCommittedError(self)
			with self.revision_lock:
				self._increase_revision_number()
				self._payload = Payload(
						iu=self,
						writer_name=None if self.buffer is None else (self.buffer.unique_name if writer_name is None else writer_name),
						new_payload=new_pl)
	payload = property(
			fget=_get_payload,
			fset=_set_payload,
			doc='Payload dictionary of this IU.')
	def _get_is_published(self):
		return self.buffer is not None
	is_published = property(
			fget=_get_is_published,
			doc='Flag indicating whether this IU has been published or not.')
	def _set_buffer(self, buffer):
		if self._buffer is not None:
			raise Exception('The IU is already in a buffer, cannot move it.')
		self._buffer = buffer
		self.owner_name = buffer.unique_name
		self._payload.owner_name = buffer.unique_name
	buffer = property(
			fget=IUInterface._get_buffer,
			fset=_set_buffer,
			doc='Buffer this IU is held in.')
	def _set_uid(self, uid):
		if self._uid is not None:
			raise AttributeError('The uid of IU ' + self.uid + ' has already been set, cannot change it.')
		self._uid = uid
	uid = property(
			fget=IUInterface._get_uid,
			fset=_set_uid,
			doc='Unique ID of the IU.')
#}}}
class RemoteMessage(IUInterface):#{{{
	"""A remote IU with access mode 'MESSAGE'."""
	def __init__(self, uid, revision, read_only, owner_name, category, payload_type, committed, payload, links):
		# NOTE(review): super() is called with IUAccessMode.PUSH even though
		# this is the MESSAGE variant -- confirm whether that is intended.
		super(RemoteMessage, self).__init__(uid=uid, access_mode=IUAccessMode.PUSH, read_only=read_only)
		self._revision = revision
		self._category = category
		self.owner_name = owner_name
		self._payload_type = payload_type
		self._committed = committed
		self._retracted = False
		# NOTE Since the payload is an already-existant Payload which we didn't modify ourselves,
		# don't try to invoke any modification checks or network updates ourselves either.
		# We are just receiving it here and applying the new data.
		self._payload = Payload(iu=self, new_payload=payload, omit_init_update_message=True)
		self._links = links
	def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
		# Remote messages cannot be written back; changes stay local.
		logger.info('Info: modifying a RemoteMessage only has local effects')
	def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
		logger.info('Info: modifying a RemoteMessage only has local effects')
	def commit(self):
		logger.info('Info: committing to a RemoteMessage only has local effects')
	def _get_payload(self):
		return self._payload
	def _set_payload(self, new_pl):
		# Replaces the local copy only; nothing is sent.
		logger.info('Info: modifying a RemoteMessage only has local effects')
		self._payload = Payload(iu=self, new_payload=new_pl, omit_init_update_message=True)
	payload = property(
			fget=_get_payload,
			fset=_set_payload,
			doc='Payload dictionary of the IU.')
	def _apply_link_update(self, update):
		"""Apply a IULinkUpdate to the IU."""
		# Messages only exist during handler calls, so updates are unexpected.
		logger.warning('Warning: should never be called: RemoteMessage._apply_link_update')
		self._revision = update.revision
		if update.is_delta:
			self._add_and_remove_links(add=update.new_links, remove=update.links_to_remove)
		else:
			self._replace_links(links=update.new_links)
	def _apply_update(self, update):
		"""Apply a IUPayloadUpdate to the IU."""
		logger.warning('Warning: should never be called: RemoteMessage._apply_update')
		self._revision = update.revision
		if update.is_delta:
			for k in update.keys_to_remove: self.payload._remotely_enforced_delitem(k)
			for k, v in update.new_items.items(): self.payload._remotely_enforced_setitem(k, v)
		else:
			# NOTE Please read the comment in the constructor
			self._payload = Payload(iu=self, new_payload=update.new_items, omit_init_update_message=True)
	def _apply_commission(self):
		"""Apply commission to the IU"""
		logger.warning('Warning: should never be called: RemoteMessage._apply_commission')
		self._committed = True
	def _apply_retraction(self):
		"""Apply retraction to the IU"""
		logger.warning('Warning: should never be called: RemoteMessage._apply_retraction')
		self._retracted = True
#}}}
class RemotePushIU(IUInterface):#{{{
	"""A remote IU with access mode 'PUSH'."""
	def __init__(self, uid, revision, read_only, owner_name, category, payload_type, committed, payload, links):
		super(RemotePushIU, self).__init__(uid=uid, access_mode=IUAccessMode.PUSH, read_only=read_only)
		self._revision = revision
		self._category = category
		self.owner_name = owner_name
		self._payload_type = payload_type
		self._committed = committed
		self._retracted = False
		# NOTE Since the payload is an already-existant Payload which we didn't modify ourselves,
		# don't try to invoke any modification checks or network updates ourselves either.
		# We are just receiving it here and applying the new data.
		self._payload = Payload(iu=self, new_payload=payload, omit_init_update_message=True)
		self._links = links
	def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
		"""Modify the links: add or remove item from this payload remotely and send update."""
		if self.committed:
			raise IUCommittedError(self)
		if self.read_only:
			raise IUReadOnlyError(self)
		requested_update = IULinkUpdate(
				uid=self.uid,
				revision=self.revision,
				is_delta=is_delta,
				writer_name=self.buffer.unique_name,
				new_links=new_links,
				links_to_remove=links_to_remove)
		remote_server = self.buffer._get_remote_server(self)
		new_revision = remote_server.updateLinks(requested_update)
		# The owner signals rejection of the update with revision 0.
		if new_revision == 0:
			raise IUUpdateFailedError(self)
		else:
			self._revision = new_revision
	def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
		"""Modify the payload: add or remove item from this payload remotely and send update."""
		if self.committed:
			raise IUCommittedError(self)
		if self.read_only:
			raise IUReadOnlyError(self)
		requested_update = IUPayloadUpdate(
				uid=self.uid,
				revision=self.revision,
				is_delta=is_delta,
				writer_name=self.buffer.unique_name,
				new_items=new_items,
				keys_to_remove=keys_to_remove)
		remote_server = self.buffer._get_remote_server(self)
		new_revision = remote_server.updatePayload(requested_update)
		if new_revision == 0:
			raise IUUpdateFailedError(self)
		else:
			self._revision = new_revision
	def commit(self):
		"""Commit to this IU."""
		if self.read_only:
			raise IUReadOnlyError(self)
		if self._committed:
			# ignore commit requests when already committed
			return
		else:
			commission_request = ipaaca_pb2.IUCommission()
			commission_request.uid = self.uid
			commission_request.revision = self.revision
			commission_request.writer_name = self.buffer.unique_name
			remote_server = self.buffer._get_remote_server(self)
			new_revision = remote_server.commit(commission_request)
			if new_revision == 0:
				raise IUUpdateFailedError(self)
			else:
				self._revision = new_revision
				self._committed = True
	def _get_payload(self):
		return self._payload
	def _set_payload(self, new_pl):
		# Full (non-delta) payload replacement, negotiated with the owner.
		if self.committed:
			raise IUCommittedError(self)
		if self.read_only:
			raise IUReadOnlyError(self)
		requested_update = IUPayloadUpdate(
				uid=self.uid,
				revision=self.revision,
				is_delta=False,
				writer_name=self.buffer.unique_name,
				new_items=new_pl,
				keys_to_remove=[])
		remote_server = self.buffer._get_remote_server(self)
		new_revision = remote_server.updatePayload(requested_update)
		if new_revision == 0:
			raise IUUpdateFailedError(self)
		else:
			self._revision = new_revision
			# NOTE Please read the comment in the constructor
			self._payload = Payload(iu=self, new_payload=new_pl, omit_init_update_message=True)
	payload = property(
			fget=_get_payload,
			fset=_set_payload,
			doc='Payload dictionary of the IU.')
	def _apply_link_update(self, update):
		"""Apply a IULinkUpdate to the IU."""
		self._revision = update.revision
		if update.is_delta:
			self._add_and_remove_links(add=update.new_links, remove=update.links_to_remove)
		else:
			self._replace_links(links=update.new_links)
	def _apply_update(self, update):
		"""Apply a IUPayloadUpdate to the IU."""
		self._revision = update.revision
		if update.is_delta:
			for k in update.keys_to_remove: self.payload._remotely_enforced_delitem(k)
			for k, v in update.new_items.items(): self.payload._remotely_enforced_setitem(k, v)
		else:
			# NOTE Please read the comment in the constructor
			self._payload = Payload(iu=self, new_payload=update.new_items, omit_init_update_message=True)
	def _apply_commission(self):
		"""Apply commission to the IU"""
		self._committed = True
	def _apply_retraction(self):
		"""Apply retraction to the IU"""
		self._retracted = True
#}}}
class IntConverter(rsb.converter.Converter):#{{{
	"""Convert Python int objects to Protobuf ints and vice versa."""
	def __init__(self, wireSchema="int", dataType=int):
		super(IntConverter, self).__init__(bytearray, dataType, wireSchema)
	def serialize(self, value):
		"""Wrap the int in an IntMessage and return (wire bytes, schema)."""
		message = ipaaca_pb2.IntMessage()
		message.value = value
		wire = bytearray(message.SerializeToString())
		return wire, self.wireSchema
	def deserialize(self, byte_stream, ws):
		"""Parse an IntMessage from the wire and return the plain int value."""
		message = ipaaca_pb2.IntMessage()
		message.ParseFromString( str(byte_stream) )
		return message.value
#}}}
class IUConverter(rsb.converter.Converter):#{{{
	'''
	Converter class for Full IU representations
	wire:bytearray <-> wire-schema:ipaaca-full-iu <-> class ipaacaRSB.IU
	'''
	def __init__(self, wireSchema="ipaaca-iu", dataType=IU):
		super(IUConverter, self).__init__(bytearray, dataType, wireSchema)
	def serialize(self, iu):
		"""Serialize a local IU into its protobuf wire form; returns (wire, schema)."""
		pbo = ipaaca_pb2.IU()
		pbo.uid = iu._uid
		pbo.revision = iu._revision
		pbo.category = iu._category
		pbo.payload_type = iu._payload_type
		pbo.owner_name = iu._owner_name
		pbo.committed = iu._committed
		pbo.access_mode = iu._access_mode #ipaaca_pb2.IU.PUSH # TODO
		pbo.read_only = iu._read_only
		for k,v in iu._payload.items():
			entry = pbo.payload.add()
			pack_typed_payload_item(entry, k, v)
		for type_ in iu._links.keys():
			linkset = pbo.links.add()
			linkset.type = type_
			linkset.targets.extend(iu._links[type_])
		return bytearray(pbo.SerializeToString()), self.wireSchema
	@staticmethod
	def _unpack_payload(pbo):
		# Collect the typed payload entries into a plain dict.
		payload = {}
		for entry in pbo.payload:
			k, v = unpack_typed_payload_item(entry)
			payload[k] = v
		return payload
	@staticmethod
	def _unpack_links(pbo):
		# Collect the link sets into a defaultdict(set) keyed by link type.
		links = collections.defaultdict(set)
		for linkset in pbo.links:
			for target_uid in linkset.targets:
				links[linkset.type].add(target_uid)
		return links
	def deserialize(self, byte_stream, ws):
		"""Deserialize wire bytes into a RemotePushIU or RemoteMessage.

		Raises ValueError for an unexpected dataType and Exception for
		unsupported access modes.
		"""
		# Fix: the PUSH and MESSAGE branches previously duplicated ~20 lines
		# of payload/link unpacking each; both now share the helpers above.
		# (Also stops shadowing the builtin ``type``.)
		data_type = self.getDataType()
		if data_type != IU:
			raise ValueError("Inacceptable dataType %s" % data_type)
		pbo = ipaaca_pb2.IU()
		pbo.ParseFromString( str(byte_stream) )
		if pbo.access_mode == ipaaca_pb2.IU.PUSH:
			remote_class = RemotePushIU
		elif pbo.access_mode == ipaaca_pb2.IU.MESSAGE:
			remote_class = RemoteMessage
		else:
			raise Exception("We can only handle IUs with access mode 'PUSH' or 'MESSAGE' for now!")
		return remote_class(
				uid=pbo.uid,
				revision=pbo.revision,
				read_only = pbo.read_only,
				owner_name = pbo.owner_name,
				category = pbo.category,
				payload_type = pbo.payload_type,
				committed = pbo.committed,
				payload=self._unpack_payload(pbo),
				links=self._unpack_links(pbo)
				)
#}}}
class IULinkUpdate(object):#{{{
	"""Value object describing a (delta or full) change to an IU's links."""
	def __init__(self, uid, revision, is_delta, writer_name="undef", new_links=None, links_to_remove=None):
		super(IULinkUpdate, self).__init__()
		self.uid = uid
		self.revision = revision
		self.writer_name = writer_name
		self.is_delta = is_delta
		# Both link maps are defaultdict(set), optionally seeded by the caller.
		if new_links is None:
			self.new_links = collections.defaultdict(set)
		else:
			self.new_links = collections.defaultdict(set, new_links)
		if links_to_remove is None:
			self.links_to_remove = collections.defaultdict(set)
		else:
			self.links_to_remove = collections.defaultdict(set, links_to_remove)
	def __str__(self):
		parts = [
			'LinkUpdate(' + 'uid=' + self.uid + ', ',
			'revision=' + str(self.revision) + ', ',
			'writer_name=' + str(self.writer_name) + ', ',
			'is_delta=' + str(self.is_delta) + ', ',
			'new_links = ' + str(self.new_links) + ', ',
			'links_to_remove = ' + str(self.links_to_remove) + ')',
		]
		return ''.join(parts)
#}}}
class IUPayloadUpdate(object):#{{{
	"""Value object describing a (delta or full) change to an IU's payload."""
	def __init__(self, uid, revision, is_delta, writer_name="undef", new_items=None, keys_to_remove=None):
		super(IUPayloadUpdate, self).__init__()
		self.uid = uid
		self.revision = revision
		self.writer_name = writer_name
		self.is_delta = is_delta
		# Avoid shared mutable defaults: fresh containers when not supplied.
		if new_items is None:
			self.new_items = {}
		else:
			self.new_items = new_items
		if keys_to_remove is None:
			self.keys_to_remove = []
		else:
			self.keys_to_remove = keys_to_remove
	def __str__(self):
		parts = [
			'PayloadUpdate(' + 'uid=' + self.uid + ', ',
			'revision=' + str(self.revision) + ', ',
			'writer_name=' + str(self.writer_name) + ', ',
			'is_delta=' + str(self.is_delta) + ', ',
			'new_items = ' + str(self.new_items) + ', ',
			'keys_to_remove = ' + str(self.keys_to_remove) + ')',
		]
		return ''.join(parts)
#}}}
class IULinkUpdateConverter(rsb.converter.Converter):#{{{
	"""Converter between IULinkUpdate objects and their protobuf wire form."""
	def __init__(self, wireSchema="ipaaca-iu-link-update", dataType=IULinkUpdate):
		super(IULinkUpdateConverter, self).__init__(bytearray, dataType, wireSchema)
	def serialize(self, iu_link_update):
		# Copy scalar fields, then one LinkSet entry per link type.
		pbo = ipaaca_pb2.IULinkUpdate()
		pbo.uid = iu_link_update.uid
		pbo.writer_name = iu_link_update.writer_name
		pbo.revision = iu_link_update.revision
		for type_ in iu_link_update.new_links.keys():
			linkset = pbo.new_links.add()
			linkset.type = type_
			linkset.targets.extend(iu_link_update.new_links[type_])
		for type_ in iu_link_update.links_to_remove.keys():
			linkset = pbo.links_to_remove.add()
			linkset.type = type_
			linkset.targets.extend(iu_link_update.links_to_remove[type_])
		pbo.is_delta = iu_link_update.is_delta
		return bytearray(pbo.SerializeToString()), self.wireSchema
	def deserialize(self, byte_stream, ws):
		# 'type' shadows the builtin here (existing style, kept as-is).
		type = self.getDataType()
		if type == IULinkUpdate:
			pbo = ipaaca_pb2.IULinkUpdate()
			pbo.ParseFromString( str(byte_stream) )
			logger.debug('received an IULinkUpdate for revision '+str(pbo.revision))
			iu_link_up = IULinkUpdate( uid=pbo.uid, revision=pbo.revision, writer_name=pbo.writer_name, is_delta=pbo.is_delta)
			for entry in pbo.new_links:
				iu_link_up.new_links[str(entry.type)] = set(entry.targets)
			for entry in pbo.links_to_remove:
				iu_link_up.links_to_remove[str(entry.type)] = set(entry.targets)
			return iu_link_up
		else:
			raise ValueError("Inacceptable dataType %s" % type)
#}}}
class IUPayloadUpdateConverter(rsb.converter.Converter):#{{{
	"""Converter between IUPayloadUpdate objects and their protobuf wire form."""
	def __init__(self, wireSchema="ipaaca-iu-payload-update", dataType=IUPayloadUpdate):
		super(IUPayloadUpdateConverter, self).__init__(bytearray, dataType, wireSchema)
	def serialize(self, iu_payload_update):
		# Copy scalar fields, then one typed entry per new payload item.
		pbo = ipaaca_pb2.IUPayloadUpdate()
		pbo.uid = iu_payload_update.uid
		pbo.writer_name = iu_payload_update.writer_name
		pbo.revision = iu_payload_update.revision
		for k,v in iu_payload_update.new_items.items():
			entry = pbo.new_items.add()
			pack_typed_payload_item(entry, k, v)
		pbo.keys_to_remove.extend(iu_payload_update.keys_to_remove)
		pbo.is_delta = iu_payload_update.is_delta
		return bytearray(pbo.SerializeToString()), self.wireSchema
	def deserialize(self, byte_stream, ws):
		# 'type' shadows the builtin here (existing style, kept as-is).
		type = self.getDataType()
		if type == IUPayloadUpdate:
			pbo = ipaaca_pb2.IUPayloadUpdate()
			pbo.ParseFromString( str(byte_stream) )
			logger.debug('received an IUPayloadUpdate for revision '+str(pbo.revision))
			iu_up = IUPayloadUpdate( uid=pbo.uid, revision=pbo.revision, writer_name=pbo.writer_name, is_delta=pbo.is_delta)
			for entry in pbo.new_items:
				k, v = unpack_typed_payload_item(entry)
				iu_up.new_items[k] = v
			iu_up.keys_to_remove = pbo.keys_to_remove[:]
			return iu_up
		else:
			raise ValueError("Inacceptable dataType %s" % type)
#}}}
class IUStore(dict):
	"""A dictionary storing IUs."""
	# Plain dict subclass; presumably keyed by IU uid -- confirm with callers.
	def __init__(self):
		super(IUStore, self).__init__()
class FrozenIUStore(IUStore):
	"""A read-only version of a dictionary storing IUs. (TODO: might be slow)"""
	def __init__(self, original_iu_store):
		"""Create a frozen snapshot of ``original_iu_store``."""
		super(FrozenIUStore, self).__init__()
		# Fix: the previous map(lambda ...) copy is lazy under Python 3 and
		# would silently leave the store empty there; copy eagerly instead.
		# super().__setitem__ bypasses the overridden (raising) __setitem__.
		for k, v in original_iu_store.items():
			super(FrozenIUStore, self).__setitem__(k, v)
	def __delitem__(self, k):
		# Deletion is forbidden on a frozen store.
		raise AttributeError()
	def __setitem__(self, k, v):
		# Assignment is forbidden on a frozen store.
		raise AttributeError()
class IUEventHandler(object):
	"""Wrapper for IU event handling functions."""
	def __init__(self, handler_function, for_event_types=None, for_categories=None):
		"""Create an IUEventHandler.
		Keyword arguments:
		handler_function -- the handler function with the signature
			(IU, event_type, local)
		for_event_types -- a list of event types or None if handler should
			be called for all event types
		for_categories -- a list of category names or None if handler should
			be called for all categories
		"""
		super(IUEventHandler, self).__init__()
		self._handler_function = handler_function
		# Normalise each filter: None means 'no filter'; a non-iterable
		# single value is wrapped in a list; iterables are shallow-copied.
		if for_event_types is None:
			self._for_event_types = None
		elif hasattr(for_event_types, '__iter__'):
			self._for_event_types = for_event_types[:]
		else:
			self._for_event_types = [for_event_types]
		if for_categories is None:
			self._for_categories = None
		elif hasattr(for_categories, '__iter__'):
			self._for_categories = for_categories[:]
		else:
			self._for_categories = [for_categories]
	def condition_met(self, event_type, category):
		"""Check whether this IUEventHandler should be called.
		Keyword arguments:
		event_type -- type of the IU event
		category -- category of the IU which triggered the event
		"""
		if self._for_event_types is not None and event_type not in self._for_event_types:
			return False
		if self._for_categories is not None and category not in self._for_categories:
			return False
		return True
	def call(self, buffer, iu_uid, local, event_type, category):
		"""Call this IUEventHandler's function, if it applies.
		Keyword arguments:
		buffer -- the buffer in which the IU is stored
		iu_uid -- the uid of the IU
		local -- is the IU local or remote to this component? @RAMIN: Is this correct?
		event_type -- IU event type
		category -- category of the IU
		"""
		if not self.condition_met(event_type, category):
			return
		iu = buffer._iu_store[iu_uid]
		self._handler_function(iu, event_type, local)
class Buffer(object):
	"""Base class for InputBuffer and OutputBuffer."""
	def __init__(self, owning_component_name, participant_config=None):
		'''Create a Buffer.
		Keyword arguments:
		owning_component_name -- name of the entity that owns this Buffer
		participant_config -- RSB configuration
		'''
		super(Buffer, self).__init__()
		self._owning_component_name = owning_component_name
		if participant_config is None:
			self._participant_config = rsb.ParticipantConfig.fromDefaultSources()
		else:
			self._participant_config = participant_config
		self._uuid = str(uuid.uuid4())[0:8]
		# Temporary, but already unique, name; subclasses assign the real one
		self._unique_name = "undef-"+self._uuid
		self._iu_store = IUStore()
		self._iu_event_handlers = []
	def _get_frozen_iu_store(self):
		return FrozenIUStore(original_iu_store=self._iu_store)
	iu_store = property(fget=_get_frozen_iu_store, doc='Copy-on-read version of the internal IU store')
	def register_handler(self, handler_function, for_event_types=None, for_categories=None):
		"""Register a new IU event handler function.
		Keyword arguments:
		handler_function -- a function with the signature (IU, event_type, local)
		for_event_types -- a list of event types or None if handler should
			be called for all event types
		for_categories -- a list of category names or None if handler should
			be called for all categories
		"""
		self._iu_event_handlers.append(IUEventHandler(
				handler_function=handler_function,
				for_event_types=for_event_types,
				for_categories=for_categories))
	def call_iu_event_handlers(self, uid, local, event_type, category):
		"""Call registered IU event handler functions registered for this event_type and category."""
		for handler in self._iu_event_handlers:
			handler.call(self, uid, local=local, event_type=event_type, category=category)
	def _get_owning_component_name(self):
		"""Return the name of this Buffer's owning component."""
		return self._owning_component_name
	owning_component_name = property(_get_owning_component_name)
	def _get_unique_name(self):
		"""Return the Buffer's unique name."""
		return self._unique_name
	unique_name = property(_get_unique_name)
class InputBuffer(Buffer):
	"""An InputBuffer that holds remote IUs."""
	def __init__(self, owning_component_name, category_interests=None, participant_config=None):
		'''Create an InputBuffer.
		Keyword arguments:
		owning_component_name -- name of the entity that owns this InputBuffer
		category_interests -- list of IU categories this Buffer is interested in
		participant_config -- RSB configuration
		'''
		super(InputBuffer, self).__init__(owning_component_name, participant_config)
		self._unique_name = '/ipaaca/component/'+str(owning_component_name)+'ID'+self._uuid+'/IB'
		self._listener_store = {} # one listener per IU category
		self._remote_server_store = {} # one remote server per remote-IU-owning Component
		self._category_interests = []
		if category_interests is not None:
			for cat in category_interests:
				self._add_category_listener(cat)
	def _get_remote_server(self, iu):
		'''Return (or create, store and return) a remote server for the IU's owning component.'''
		if iu.owner_name in self._remote_server_store:
			return self._remote_server_store[iu.owner_name]
		# TODO remove the str() when unicode is supported (issue #490)
		remote_server = rsb.createRemoteServer(rsb.Scope(str(iu.owner_name)))
		self._remote_server_store[iu.owner_name] = remote_server
		return remote_server
	def _add_category_listener(self, iu_category):
		'''Create and store a listener for the given category; no-op if one already exists.'''
		if iu_category not in self._listener_store:
			cat_listener = rsb.createListener(rsb.Scope("/ipaaca/category/"+str(iu_category)), config=self._participant_config)
			cat_listener.addHandler(self._handle_iu_events)
			self._listener_store[iu_category] = cat_listener
			self._category_interests.append(iu_category)
			logger.info("Added listener in scope "+"/ipaaca/category/"+iu_category)
	def _handle_iu_events(self, event):
		'''Dispatch incoming IU events.
		Adds incoming IUs to the store, applies payload and commit updates
		to IUs, and calls the registered IU event handlers.
		Keyword arguments:
		event -- a converted RSB event
		'''
		# Dispatch is on the concrete type of the deserialized event payload.
		type_ = type(event.data)
		if type_ is RemotePushIU:
			# a new IU
			if event.data.uid in self._iu_store:
				# already in our store
				pass
			else:
				self._iu_store[ event.data.uid ] = event.data
				event.data.buffer = self
				self.call_iu_event_handlers(event.data.uid, local=False, event_type=IUEventType.ADDED, category=event.data.category)
		elif type_ is RemoteMessage:
			# a new Message, an ephemeral IU that is removed after calling handlers
			self._iu_store[ event.data.uid ] = event.data
			event.data.buffer = self
			self.call_iu_event_handlers(event.data.uid, local=False, event_type=IUEventType.MESSAGE, category=event.data.category)
			del self._iu_store[ event.data.uid ]
		else:
			# an update to an existing IU
			if event.data.uid not in self._iu_store:
				# TODO: we should request the IU's owner to send us the IU
				logger.warning("Update message for IU which we did not fully receive before.")
				return
			if type_ is ipaaca_pb2.IURetraction:
				# IU retraction (cannot be triggered remotely)
				iu = self._iu_store[event.data.uid]
				iu._revision = event.data.revision
				iu._apply_retraction() # for now - just sets the _retracted flag.
				self.call_iu_event_handlers(event.data.uid, local=False, event_type=IUEventType.RETRACTED, category=iu.category)
				# SPECIAL CASE: allow the handlers (which will need to find the IU
				# in the buffer) to operate on the IU - then delete it afterwards!
				# FIXME: for now: retracted == deleted! Think about this later
				del(self._iu_store[iu.uid])
			else:
				if event.data.writer_name == self.unique_name:
					# Notify only for remotely triggered events;
					# Discard updates that originate from this buffer
					return
				if type_ is ipaaca_pb2.IUCommission:
					# IU commit: set the committed flag and adopt the new revision
					iu = self._iu_store[event.data.uid]
					iu._apply_commission()
					iu._revision = event.data.revision
					self.call_iu_event_handlers(event.data.uid, local=False, event_type=IUEventType.COMMITTED, category=iu.category)
				elif type_ is IUPayloadUpdate:
					# IU payload update
					iu = self._iu_store[event.data.uid]
					iu._apply_update(event.data)
					self.call_iu_event_handlers(event.data.uid, local=False, event_type=IUEventType.UPDATED, category=iu.category)
				elif type_ is IULinkUpdate:
					# IU link update
					iu = self._iu_store[event.data.uid]
					iu._apply_link_update(event.data)
					self.call_iu_event_handlers(event.data.uid, local=False, event_type=IUEventType.LINKSUPDATED, category=iu.category)
				else:
					logger.warning('Warning: _handle_iu_events failed to handle an object of type '+str(type_))
	def add_category_interests(self, category_interests):
		# Subscribe to additional IU categories at runtime.
		for interest in category_interests:
			self._add_category_listener(interest)
class OutputBuffer(Buffer):
	"""An OutputBuffer that holds local IUs."""
	def __init__(self, owning_component_name, participant_config=None):
		'''Create an Output Buffer.
		Keyword arguments:
		owning_component_name -- name of the entity that owns this buffer
		participant_config -- RSB configuration
		'''
		super(OutputBuffer, self).__init__(owning_component_name, participant_config)
		self._unique_name = '/ipaaca/component/' + str(owning_component_name) + 'ID' + self._uuid + '/OB'
		# Local RPC server through which remote InputBuffers write to our IUs;
		# each method returns the new revision (int), or 0 on failure.
		self._server = rsb.createServer(rsb.Scope(self._unique_name))
		self._server.addMethod('updateLinks', self._remote_update_links, IULinkUpdate, int)
		self._server.addMethod('updatePayload', self._remote_update_payload, IUPayloadUpdate, int)
		self._server.addMethod('commit', self._remote_commit, ipaaca_pb2.IUCommission, int)
		self._informer_store = {} # one informer per IU category
		self._id_prefix = str(owning_component_name)+'-'+str(self._uuid)+'-IU-'
		self.__iu_id_counter_lock = threading.Lock()
		#self.__iu_id_counter = 0 # hbuschme: IUs now have their Ids assigned on creation
	def _create_own_name_listener(self, iu_category):
		# FIXME replace this
		'''Create an own name listener. (Currently a stub; body is commented out.)'''
		#if iu_category in self._listener_store: return self._informer_store[iu_category]
		#cat_listener = rsb.createListener(rsb.Scope("/ipaaca/category/"+str(iu_category)), config=self._participant_config)
		#cat_listener.addHandler(self._handle_iu_events)
		#self._listener_store[iu_category] = cat_listener
		#self._category_interests.append(iu_category)
		#logger.info("Added category listener for "+iu_category)
		#return cat_listener
	# hbuschme: IUs now have their Ids assigned on creation
	#def _generate_iu_uid(self):
	#	'''Generate a unique IU id of the form ????'''
	#	with self.__iu_id_counter_lock:
	#		self.__iu_id_counter += 1
	#		number = self.__iu_id_counter
	#	return self._id_prefix + str(number)
	def _remote_update_links(self, update):
		'''Apply a remotely requested update to one of the stored IU's links.
		Returns the resulting revision number, or 0 if the request failed
		(unknown IU or out-of-date revision).'''
		if update.uid not in self._iu_store:
			logger.warning("Remote InBuffer tried to spuriously write non-existent IU "+str(update.uid))
			return 0
		iu = self._iu_store[update.uid]
		with iu.revision_lock:
			if not OMIT_REVISION_CHECKS and (update.revision != 0) and (update.revision != iu.revision):
				# (0 means "do not pay attention to the revision number" -> "force update")
				logger.warning("Remote write operation failed because request was out of date; IU "+str(update.uid))
				return 0
			if update.is_delta:
				iu.modify_links(add=update.new_links, remove=update.links_to_remove, writer_name=update.writer_name)
			else:
				iu.set_links(links=update.new_links, writer_name=update.writer_name)
			self.call_iu_event_handlers(update.uid, local=True, event_type=IUEventType.LINKSUPDATED, category=iu.category)
			return iu.revision
	def _remote_update_payload(self, update):
		'''Apply a remotely requested update to one of the stored IU's payload.
		Returns the resulting revision number, or 0 if the request failed
		(unknown IU or out-of-date revision).'''
		if update.uid not in self._iu_store:
			logger.warning("Remote InBuffer tried to spuriously write non-existent IU "+str(update.uid))
			return 0
		iu = self._iu_store[update.uid]
		with iu.revision_lock:
			if not OMIT_REVISION_CHECKS and (update.revision != 0) and (update.revision != iu.revision):
				# (0 means "do not pay attention to the revision number" -> "force update")
				logger.warning("Remote write operation failed because request was out of date; IU "+str(update.uid))
				return 0
			if update.is_delta:
				# apply removals first, then the new/changed items
				for k in update.keys_to_remove:
					iu.payload.__delitem__(k, writer_name=update.writer_name)
				for k,v in update.new_items.items():
					iu.payload.__setitem__(k, v, writer_name=update.writer_name)
			else:
				iu._set_payload(update.new_items, writer_name=update.writer_name)
			# _set_payload etc. have also incremented the revision number
			self.call_iu_event_handlers(update.uid, local=True, event_type=IUEventType.UPDATED, category=iu.category)
			return iu.revision
	def _remote_commit(self, iu_commission):
		'''Apply a remotely requested commit to one of the stored IUs.
		Returns the resulting revision number, or 0 if the request failed
		(unknown IU, out-of-date revision, or IU already committed).'''
		if iu_commission.uid not in self._iu_store:
			logger.warning("Remote InBuffer tried to spuriously write non-existent IU "+str(iu_commission.uid))
			return 0
		iu = self._iu_store[iu_commission.uid]
		with iu.revision_lock:
			if not OMIT_REVISION_CHECKS and (iu_commission.revision != 0) and (iu_commission.revision != iu.revision):
				# (0 means "do not pay attention to the revision number" -> "force update")
				logger.warning("Remote write operation failed because request was out of date; IU "+str(iu_commission.uid))
				return 0
			if iu.committed:
				return 0
			else:
				iu._internal_commit(writer_name=iu_commission.writer_name)
				self.call_iu_event_handlers(iu_commission.uid, local=True, event_type=IUEventType.COMMITTED, category=iu.category)
				return iu.revision
	def _get_informer(self, iu_category):
		'''Return (or create, store and return) an informer object for IUs of the specified category.'''
		if iu_category in self._informer_store:
			logger.info("Returning informer on scope "+"/ipaaca/category/"+str(iu_category))
			return self._informer_store[iu_category]
		informer_iu = rsb.createInformer(
				rsb.Scope("/ipaaca/category/"+str(iu_category)),
				config=self._participant_config,
				dataType=object)
		self._informer_store[iu_category] = informer_iu #new_tuple
		logger.info("Returning NEW informer on scope "+"/ipaaca/category/"+str(iu_category))
		return informer_iu #return new_tuple
	def add(self, iu):
		'''Add an IU to the IU store, assign an ID and publish it.
		Raises IUPublishedError if the IU is already in a buffer.'''
		# hbuschme: IUs now have their Ids assigned on creation
		#if iu._uid is not None:
		#	raise IUPublishedError(iu)
		#iu.uid = self._generate_iu_uid()
		if iu.uid in self._iu_store:
			raise IUPublishedError(iu)
		if iu.buffer is not None:
			raise IUPublishedError(iu)
		if iu.access_mode != IUAccessMode.MESSAGE:
			# Messages are not really stored in the OutputBuffer
			self._iu_store[iu.uid] = iu
		iu.buffer = self
		self._publish_iu(iu)
	def remove(self, iu=None, iu_uid=None):
		'''Remove the iu or an IU corresponding to iu_uid from the OutputBuffer, retracting it from the system.
		Returns the removed IU, or None if neither iu nor iu_uid was given.'''
		if iu is None:
			if iu_uid is None:
				return None
			else:
				if iu_uid not in self._iu_store:
					raise IUNotFoundError(iu_uid)
				iu = self._iu_store[iu_uid]
		# unpublish the IU
		self._retract_iu(iu)
		del self._iu_store[iu.uid]
		return iu
	def _publish_iu(self, iu):
		'''Publish an IU on its category's scope.'''
		informer = self._get_informer(iu._category)
		informer.publishData(iu)
	def _retract_iu(self, iu):
		'''Retract (unpublish) an IU by broadcasting an IURetraction message.'''
		iu_retraction = ipaaca_pb2.IURetraction()
		iu_retraction.uid = iu.uid
		iu_retraction.revision = iu.revision
		informer = self._get_informer(iu._category)
		informer.publishData(iu_retraction)
	def _send_iu_commission(self, iu, writer_name):
		'''Send IU commission.
		Keyword arguments:
		iu -- the IU that has been committed to
		writer_name -- name of the Buffer that initiated this commit, necessary
			to enable remote components to filter out updates that originated
			from their own operations
		'''
		# a raw Protobuf object for IUCommission is produced
		# (unlike updates, where we have an intermediate class)
		iu_commission = ipaaca_pb2.IUCommission()
		iu_commission.uid = iu.uid
		iu_commission.revision = iu.revision
		iu_commission.writer_name = iu.owner_name if writer_name is None else writer_name
		informer = self._get_informer(iu._category)
		informer.publishData(iu_commission)
	def _send_iu_link_update(self, iu, is_delta, revision, new_links=None, links_to_remove=None, writer_name="undef"):
		'''Send an IU link update.
		Keyword arguments:
		iu -- the IU being updated
		is_delta -- whether this is an incremental update or a replacement of
			the whole link dictionary
		revision -- the new revision number
		new_links -- a dictionary of new link sets
		links_to_remove -- a dict of the link sets that shall be removed
		writer_name -- name of the Buffer that initiated this update, necessary
			to enable remote components to filter out updates that originated
			from their own operations
		'''
		if new_links is None:
			new_links = {}
		if links_to_remove is None:
			links_to_remove = {}
		link_update = IULinkUpdate(iu._uid, is_delta=is_delta, revision=revision)
		link_update.new_links = new_links
		if is_delta:
			link_update.links_to_remove = links_to_remove
		link_update.writer_name = writer_name
		informer = self._get_informer(iu._category)
		informer.publishData(link_update)
		# FIXME send the notification to the target, if the target is not the writer_name
	def _send_iu_payload_update(self, iu, is_delta, revision, new_items=None, keys_to_remove=None, writer_name="undef"):
		'''Send an IU payload update.
		Keyword arguments:
		iu -- the IU being updated
		is_delta -- whether this is an incremental update or a replacement
		revision -- the new revision number
		new_items -- a dictionary of new payload items
		keys_to_remove -- a list of the keys that shall be removed from the
			payload
		writer_name -- name of the Buffer that initiated this update, necessary
			to enable remote components to filter out updates that originated
			from their own operations
		'''
		if new_items is None:
			new_items = {}
		if keys_to_remove is None:
			keys_to_remove = []
		payload_update = IUPayloadUpdate(iu._uid, is_delta=is_delta, revision=revision)
		payload_update.new_items = new_items
		if is_delta:
			payload_update.keys_to_remove = keys_to_remove
		payload_update.writer_name = writer_name
		informer = self._get_informer(iu._category)
		informer.publishData(payload_update)
## --- RSB -------------------------------------------------------------------
def initialize_ipaaca_rsb():#{{{
	"""Register ipaaca's RSB converters globally and (re)load the
	default RSB participant configuration."""
	converters = [
		IntConverter(wireSchema="int32", dataType=int),
		IUConverter(wireSchema="ipaaca-iu", dataType=IU),
		IULinkUpdateConverter(
			wireSchema="ipaaca-iu-link-update",
			dataType=IULinkUpdate),
		IUPayloadUpdateConverter(
			wireSchema="ipaaca-iu-payload-update",
			dataType=IUPayloadUpdate),
		rsb.converter.ProtocolBufferConverter(
			messageClass=ipaaca_pb2.IUCommission),
		rsb.converter.ProtocolBufferConverter(
			messageClass=ipaaca_pb2.IURetraction),
	]
	for conv in converters:
		rsb.converter.registerGlobalConverter(conv)
	rsb.__defaultParticipantConfig = rsb.ParticipantConfig.fromDefaultSources()
	#t = rsb.ParticipantConfig.Transport('spread', {'enabled':'true'})
	#rsb.__defaultParticipantConfig = rsb.ParticipantConfig.fromFile('rsb.cfg')
	#}}}
## --- Module initialisation -------------------------------------------------
# register our own RSB Converters (NOTE: runs as a side effect of importing
# this module)
initialize_ipaaca_rsb()
# Create a global logger for this module
logger = logging.getLogger('ipaaca')
logger.addHandler(IpaacaLoggingHandler(level=logging.INFO))
*_pb2.py
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import os
import threading
#import rsb
#import rsb.converter
import ipaaca.ipaaca_pb2
import ipaaca.converter
from ipaaca.buffer import InputBuffer, OutputBuffer
from ipaaca.exception import *
from ipaaca.iu import IU, Message, IUAccessMode, IUEventType
from ipaaca.misc import enable_logging, IpaacaArgumentParser
from ipaaca.payload import Payload
import ipaaca.backend
#
# ipaaca.exit(int_retval)
#
from ipaaca.buffer import atexit_cleanup_function
def exit(int_retval=0):
	'''Clean up all ipaaca buffers (e.g. retracting published IUs) and then
	terminate the process immediately with the given return code.
	For the time being, this function can be used to circumvent any
	sys.exit blocks. Call once at the end of any python script (or
	anywhere in lieu of sys.exit() / os._exit()).'''
	message = 'ipaaca: cleaning up and exiting with code '+str(int_retval)
	print(message)
	atexit_cleanup_function()
	os._exit(int_retval)
# Guards the one-time initialization performed below
__RSB_INITIALIZER_LOCK = threading.Lock()
# Set to True once initialize_ipaaca_rsb_if_needed() has completed
__RSB_INITIALIZED = False
def initialize_ipaaca_rsb_if_needed():
	"""Initialise RSB if it has not been initialised yet.
	* Registers ipaaca's own RSB converters.
	* Initialises RSB from environment variables, the rsb config file, or
	  from default values for the RSB transport, host, and port (via
	  ipaaca.defaults or ipaaca.misc.IpaacaArgumentParser).
	Thread-safe and idempotent: only the first call performs any work.
	"""
	global __RSB_INITIALIZED
	# FIX: ipaaca.defaults is referenced below but was not imported at the
	# top of this module; import it here so the attribute access cannot fail.
	import ipaaca.defaults
	with __RSB_INITIALIZER_LOCK:
		if __RSB_INITIALIZED:
			return
		ipaaca.converter.register_global_converter(
			ipaaca.converter.IUConverter(
				wireSchema="ipaaca-iu",
				dataType=IU))
		ipaaca.converter.register_global_converter(
			ipaaca.converter.MessageConverter(
				wireSchema="ipaaca-messageiu",
				dataType=Message))
		# FIX: the following two converters previously referenced the bare
		# name 'converter' (e.g. converter.IULinkUpdate), which is not bound
		# in this module -- only 'ipaaca.converter' is imported -- and would
		# raise NameError on first initialisation.
		ipaaca.converter.register_global_converter(
			ipaaca.converter.IULinkUpdateConverter(
				wireSchema="ipaaca-iu-link-update",
				dataType=ipaaca.converter.IULinkUpdate))
		ipaaca.converter.register_global_converter(
			ipaaca.converter.IUPayloadUpdateConverter(
				wireSchema="ipaaca-iu-payload-update",
				dataType=ipaaca.converter.IUPayloadUpdate))
		# Translate ipaaca defaults into the environment variables RSB reads.
		if ipaaca.defaults.IPAACA_DEFAULT_RSB_TRANSPORT is not None:
			if ipaaca.defaults.IPAACA_DEFAULT_RSB_TRANSPORT == 'spread':
				os.environ['RSB_TRANSPORT_SPREAD_ENABLED'] = str(1)
				os.environ['RSB_TRANSPORT_SOCKET_ENABLED'] = str(0)
			elif ipaaca.defaults.IPAACA_DEFAULT_RSB_TRANSPORT == 'socket':
				os.environ['RSB_TRANSPORT_SPREAD_ENABLED'] = str(0)
				os.environ['RSB_TRANSPORT_SOCKET_ENABLED'] = str(1)
		if ipaaca.defaults.IPAACA_DEFAULT_RSB_SOCKET_SERVER is not None:
			os.environ['RSB_TRANSPORT_SOCKET_SERVER'] = str(
				ipaaca.defaults.IPAACA_DEFAULT_RSB_SOCKET_SERVER)
		if ipaaca.defaults.IPAACA_DEFAULT_RSB_HOST is not None:
			os.environ['RSB_TRANSPORT_SPREAD_HOST'] = str(
				ipaaca.defaults.IPAACA_DEFAULT_RSB_HOST)
			os.environ['RSB_TRANSPORT_SOCKET_HOST'] = str(
				ipaaca.defaults.IPAACA_DEFAULT_RSB_HOST)
		if ipaaca.defaults.IPAACA_DEFAULT_RSB_PORT is not None:
			os.environ['RSB_TRANSPORT_SPREAD_PORT'] = str(
				ipaaca.defaults.IPAACA_DEFAULT_RSB_PORT)
			os.environ['RSB_TRANSPORT_SOCKET_PORT'] = str(
				ipaaca.defaults.IPAACA_DEFAULT_RSB_PORT)
		#
		ipaaca.backend.register_backends()
		__RSB_INITIALIZED = True
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import ipaaca.defaults
import ipaaca.exception
import ipaaca.iu
import ipaaca.misc
import ipaaca.converter
import threading
import uuid
import os
import time
LOGGER = ipaaca.misc.get_library_logger()  # shared library-wide logger
# Maps back-end name -> back-end instance; filled by register_backends()
__registered_backends = {}
# One-shot guard so that back-end detection runs only once
__backend_registration_done = False
def register_backends():
	"""Detect and register the available ipaaca back-ends (MQTT, ROS).
	Only the first invocation performs any work; back-end modules whose
	transport is unavailable return None from create_backend() and are
	skipped."""
	global __registered_backends
	global __backend_registration_done
	if __backend_registration_done:
		return
	__backend_registration_done = True
	LOGGER.debug('Registering available back-ends')
	def _add_if_available(backend):
		# register a back-end candidate unless its transport is unavailable
		if backend is not None:
			__registered_backends[backend.name] = backend
			LOGGER.debug('Back-end '+str(backend.name)+' added')
	# register available backends
	# mqtt
	import ipaaca.backend_mqtt
	_add_if_available(ipaaca.backend_mqtt.create_backend())
	# ros
	import ipaaca.backend_ros
	_add_if_available(ipaaca.backend_ros.create_backend())
def get_default_backend():
	"""Return the configured preferred back-end, or an arbitrary available
	one if no preference is set. Raises RuntimeError if none could be
	initialized, and BackendInitializationError if the preferred one is
	not available."""
	# TODO selection mechanism / config
	if not __backend_registration_done:
		register_backends()
	if len(__registered_backends) == 0:
		raise RuntimeError('No back-ends could be initialized for ipaaca-python')
	# NOTE(review): ipaaca.config is not imported at the top of this module;
	# it appears to be reachable only via chained imports -- verify.
	preferred = ipaaca.config.get_global_config().get_with_default('backend', None)
	if preferred is not None:
		if preferred not in __registered_backends:
			raise ipaaca.exception.BackendInitializationError(preferred)
		chosen = preferred
	else:
		chosen = list(__registered_backends.keys())[0]
		if len(__registered_backends) > 1:
			LOGGER.warning('No preferred ipaaca.backend set, returning one of several (probably the first in list)')
			print('Using randomly selected back-end {}!'.format(chosen))
	LOGGER.info('Back-end is '+str(chosen))
	return __registered_backends[chosen]
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import collections
import ipaaca.ipaaca_pb2
import ipaaca.defaults
import ipaaca.exception
import ipaaca.iu
import ipaaca.misc
import ipaaca.converter
import ipaaca.backend
import ipaaca.config
import threading
try:
import queue
except:
import Queue as queue
import uuid
import os
import time
try:
import paho.mqtt.client as mqtt
MQTT_ENABLED = True
except:
MQTT_ENABLED = False
# create_backend() is selected at import time depending on whether
# paho.mqtt could be imported above.
if not MQTT_ENABLED:
	def create_backend():
		# MQTT transport unavailable: signal "no back-end" to the registry
		return None
else:
	def create_backend():
		# Factory for the MQTT back-end
		return MQTTBackend(name='mqtt')
LOGGER = ipaaca.misc.get_library_logger()  # shared library-wide logger
_REMOTE_SERVER_MAX_QUEUED_REQUESTS = -1 # unlimited
_REMOTE_LISTENER_MAX_QUEUED_EVENTS = 1024 # queue.Full exception if exceeded
class EventWrapper(object):
	"""Minimal envelope carrying a single deserialized payload object
	through the Listener's event queue."""
	def __init__(self, data):
		# the deserialized payload object
		self.data = data
class PendingRequest(object):
	'''Encapsulation of a pending remote request with
	a facility to keep the requesting thread locked
	until the reply or a timeout unlocks it.'''
	def __init__(self, request):
		self._request = request
		self._event = threading.Event()
		self._reply = None
		self._request_uid = str(uuid.uuid4())[0:8]
	def wait_for_reply(self, timeout=30.0):
		"""Block until a reply arrives (returning it) or the timeout
		expires (returning None)."""
		if self._event.wait(timeout):
			return self._reply
		return None
	def reply_with_result(self, reply):
		"""Store the reply and wake up the waiting requester thread."""
		self._reply = reply
		self._event.set()
class Informer(object):
	'''Informer interface, wrapping an outbound port to MQTT'''
	def __init__(self, scope, config=None):
		'''Create an informer publishing on the given scope (MQTT topic).
		Keyword arguments:
		scope -- MQTT topic to publish on
		config -- ipaaca config object queried for broker host/port
			(NOTE(review): the default of None would fail at
			config.get_with_default below; callers appear to always
			pass a config -- verify)
		Connects to the broker immediately (via run_in_background).
		'''
		self._scope = scope
		self._running = False
		self._live = False
		self._live_event = threading.Event()  # set once connected to the broker
		self._handlers = []
		#
		# unique client id: <module>.<class>_<uuid fragment>_<scope>
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + scope
		self._mqtt_client = mqtt.Client(self._client_id)
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = int(config.get_with_default('transport.mqtt.port', 1883, warn=True))
		self._mqtt_client.on_connect = self.mqtt_callback_on_connect
		self._mqtt_client.on_disconnect = self.mqtt_callback_on_disconnect
		self._mqtt_client.on_message = self.mqtt_callback_on_message
		self._mqtt_client.on_subscribe = self.mqtt_callback_on_subscribe
		#self._mqtt_client.on_publish = self.mqtt_callback_on_publish
		self.run_in_background()
	def deactivate(self):
		# intentionally a no-op; actual teardown happens in deactivate_internal()
		pass
	def deactivate_internal(self):
		# disconnect from the broker and drop the client object
		self._mqtt_client.disconnect()
		self._mqtt_client = None
	def run_in_background(self):
		# start the MQTT network loop thread and connect (at most once)
		if not self._running:
			self._running = True
			self._mqtt_client.loop_start()
			self._mqtt_client.connect(self._host, self._port)
	def mqtt_callback_on_connect(self, client, userdata, flags, rc):
		# rc > 0 indicates a failed connection attempt
		if rc > 0:
			LOGGER.warning('MQTT connect failed, result code ' + str(rc))
		else:
			self._live = True
			self._live_event.set()
	def mqtt_callback_on_subscribe(self, client, userdata, mid, granted_qos):
		# TODO should / could track how many / which topics have been granted
		if any(q != 2 for q in granted_qos):
			LOGGER.warning('MQTT subscription did not obtain QoS level 2')
	def mqtt_callback_on_disconnect(self, client, userdata, rc):
		LOGGER.warning('MQTT disconnect for '+str(self._scope)+' with result code '+str(rc))
	def mqtt_callback_on_message(self, client, userdata, message):
		# an Informer does not subscribe to anything, so nothing to do here
		pass
	def publishData(self, data):
		#print('Informer publishing '+str(data.__class__.__name__)+' on '+self._scope)
		# serialize via the ipaaca converter registry; QoS 2
		self._mqtt_client.publish(self._scope, ipaaca.converter.serialize(data), qos=2)
class BackgroundEventDispatcher(threading.Thread):
	"""Daemon thread that takes events from a Listener's queue and feeds
	them, in order, to all handlers registered on that Listener."""
	def __init__(self, listener):
		super(BackgroundEventDispatcher, self).__init__()
		self.daemon = True  # do not keep the interpreter alive on exit
		self._listener = listener
	def terminate(self):
		# NOTE: only clears the flag; the blocking get() in run() is woken
		# by the None sentinel that Listener.deactivate_internal() enqueues.
		self._running = False
	def run(self):
		self._running = True
		listener = self._listener
		while self._running: # auto-terminated (daemon)
			event = listener._event_queue.get(block=True, timeout=None)
			if event is None: return # signaled termination
			#print('\033[31mDispatch '+str(event.data.__class__.__name__)+' start ...\033[m')
			for handler in self._listener._handlers:
				handler(event)
			#print('\033[32m... dispatch '+str(event.data.__class__.__name__)+' end.\033[m')
class Listener(object):
	'''Listener interface, wrapping an inbound port from MQTT'''
	def __init__(self, scope, config=None):
		'''Create a listener subscribing to the given scope (MQTT topic).
		Keyword arguments:
		scope -- MQTT topic to subscribe to
		config -- ipaaca config object queried for broker host/port
			(NOTE(review): the default of None would fail at
			config.get_with_default below; callers appear to always
			pass a config -- verify)
		Received messages are queued and dispatched to the registered
		handlers by a BackgroundEventDispatcher thread. Connects to the
		broker immediately (via run_in_background).
		'''
		self._scope = scope
		self._running = False
		self._live = False
		self._live_event = threading.Event()  # set once the subscription is granted
		self._handlers = []
		self._event_queue = queue.Queue(_REMOTE_LISTENER_MAX_QUEUED_EVENTS)
		#
		# unique client id: <module>.<class>_<uuid fragment>_<scope>
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + scope
		self._mqtt_client = mqtt.Client(self._client_id)
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = int(config.get_with_default('transport.mqtt.port', 1883, warn=True))
		self._mqtt_client.on_connect = self.mqtt_callback_on_connect
		self._mqtt_client.on_disconnect = self.mqtt_callback_on_disconnect
		self._mqtt_client.on_message = self.mqtt_callback_on_message
		self._mqtt_client.on_subscribe = self.mqtt_callback_on_subscribe
		#self._mqtt_client.on_socket_open = self.mqtt_callback_on_socket_open
		#self._mqtt_client.on_socket_close = self.mqtt_callback_on_socket_close
		#self._mqtt_client.on_log = self.mqtt_callback_on_log
		#self._mqtt_client.on_publish = self.mqtt_callback_on_publish
		self._dispatcher = BackgroundEventDispatcher(self)
		self._dispatcher.start()
		self.run_in_background()
	def deactivate(self):
		# intentionally a no-op; actual teardown happens in deactivate_internal()
		pass
	def deactivate_internal(self):
		self._event_queue.put(None, block=False) # signal termination, waking queue
		self._dispatcher.terminate()
		self._mqtt_client.disconnect()
		self._mqtt_client = None
	def run_in_background(self):
		# start the MQTT network loop thread and connect (at most once)
		if not self._running:
			self._running = True
			self._mqtt_client.loop_start()
			LOGGER.debug('Connect to '+str(self._host)+':'+str(self._port))
			self._mqtt_client.connect(self._host, self._port)
	#def mqtt_callback_on_log(self, client, userdata, level, buf):
	#	print('Listener: LOG: '+str(buf))
	def mqtt_callback_on_connect(self, client, userdata, flags, rc):
		# rc > 0 indicates a failed connection attempt
		if rc > 0:
			LOGGER.warning('MQTT connect failed, result code ' + str(rc))
		else:
			self._mqtt_client.subscribe(self._scope, qos=2)
	def mqtt_callback_on_subscribe(self, client, userdata, mid, granted_qos):
		# TODO should / could track how many / which topics have been granted
		if any(q != 2 for q in granted_qos):
			LOGGER.warning('MQTT subscription did not obtain QoS level 2')
		self._live = True
		self._live_event.set()
	def mqtt_callback_on_disconnect(self, client, userdata, rc):
		LOGGER.warning('MQTT disconnect for '+str(self._scope)+' with result code '+str(rc))
	def mqtt_callback_on_message(self, client, userdata, message):
		# deserialize and hand over to the dispatcher thread; raises
		# queue.Full if _REMOTE_LISTENER_MAX_QUEUED_EVENTS is exceeded
		event = EventWrapper(ipaaca.converter.deserialize(message.payload))
		self._event_queue.put(event, block=False) # queue event for BackgroundEventDispatcher
	def addHandler(self, handler):
		# register a callable taking a single EventWrapper argument
		self._handlers.append(handler)
	#def publishData(self, data):
	#	self._mqtt_client.publish(self._
class LocalServer(object):
	'''LocalServer interface, allowing for RPC requests to
	IU functions, or reporting back success or failure.'''
	def __init__(self, buffer_impl, scope, config=None):
		'''Listen on `scope` for serialized RPC requests targeting IUs
		owned by `buffer_impl` and publish each result back to the
		requester's reply endpoint.

		Keyword arguments:
		buffer_impl -- the buffer implementation owning the IUs
		scope -- MQTT topic on which requests arrive
		config -- ipaaca config object providing transport.mqtt.host / .port
		'''
		self._buffer = buffer_impl
		self._scope = scope
		self._running = False
		self._live = False
		self._live_event = threading.Event()  # set once the subscription has been granted
		self._pending_requests_lock = threading.Lock()
		self._pending_requests = {}
		self._uuid = str(uuid.uuid4())[0:8]
		self._name = 'PID_' + str(os.getpid()) + '_LocalServer_' + self._uuid # unused atm
		#
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + scope
		self._mqtt_client = mqtt.Client(self._client_id)
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = int(config.get_with_default('transport.mqtt.port', 1883, warn=True))
		self._mqtt_client.on_connect = self.mqtt_callback_on_connect
		self._mqtt_client.on_disconnect = self.mqtt_callback_on_disconnect
		self._mqtt_client.on_message = self.mqtt_callback_on_message
		self._mqtt_client.on_subscribe = self.mqtt_callback_on_subscribe
		#self._mqtt_client.on_publish = self.mqtt_callback_on_publish
		self.run_in_background()
	def deactivate(self):
		# No-op; see deactivate_internal(), called from backend teardown.
		pass
	def deactivate_internal(self):
		'''Disconnect from the broker and drop the client reference.'''
		self._mqtt_client.disconnect()
		self._mqtt_client = None
	def run_in_background(self):
		'''Start the paho network loop thread and connect (idempotent).'''
		if not self._running:
			self._running = True
			self._mqtt_client.loop_start()
			self._mqtt_client.connect(self._host, self._port)
	def mqtt_callback_on_connect(self, client, userdata, flags, rc):
		# rc == 0 means success
		if rc > 0:
			LOGGER.warning('MQTT connect failed, result code ' + str(rc))
		else:
			self._mqtt_client.subscribe(self._scope, qos=2)
	def mqtt_callback_on_subscribe(self, client, userdata, mid, granted_qos):
		# TODO should / could track how many / which topics have been granted
		if any(q != 2 for q in granted_qos):
			LOGGER.warning('MQTT subscription did not obtain QoS level 2')
		self._live = True
		self._live_event.set()  # unblocks MQTTBackend.createLocalServer()
	def mqtt_callback_on_disconnect(self, client, userdata, rc):
		LOGGER.warning('MQTT disconnect for '+str(self._scope)+' with result code '+str(rc))
	def mqtt_callback_on_message(self, client, userdata, message):
		'''Dispatch an incoming RPC request to the owning buffer and send
		the (int) result back to the requester, if any.'''
		req = ipaaca.converter.deserialize(message.payload)
		result = None
		if isinstance(req, ipaaca.converter.IUPayloadUpdate):
			result = self.attempt_to_apply_remote_updatePayload(req)
		elif isinstance(req, ipaaca.converter.IULinkUpdate):
			result = self.attempt_to_apply_remote_updateLinks(req)
		elif isinstance(req, ipaaca.ipaaca_pb2.IUCommission):
			result = self.attempt_to_apply_remote_commit(req)
		elif isinstance(req, ipaaca.ipaaca_pb2.IUResendRequest):
			result = self.attempt_to_apply_remote_resendRequest(req)
		else:
			raise RuntimeError('LocalServer: got an object of wrong class '+str(req.__class__.__name__)) # TODO replace
		if result is not None:
			self.send_result_for_request(req, result)
	#
	def send_result_for_request(self, obj, result):
		'''Publish a RemoteRequestResult carrying `result` to the request's reply endpoint.'''
		pbo = ipaaca.ipaaca_pb2.RemoteRequestResult()
		pbo.result = result
		pbo.request_uid = obj.request_uid
		#print('Sending result to endpoint '+str(obj.request_endpoint))
		self._mqtt_client.publish(obj.request_endpoint, ipaaca.converter.serialize(pbo), qos=2)
	def attempt_to_apply_remote_updateLinks(self, obj):
		# Delegate link-update requests to the owning buffer.
		return self._buffer._remote_update_links(obj)
	def attempt_to_apply_remote_updatePayload(self, obj):
		# Delegate payload-update requests to the owning buffer.
		return self._buffer._remote_update_payload(obj)
	def attempt_to_apply_remote_commit(self, obj):
		# Delegate commit requests to the owning buffer.
		return self._buffer._remote_commit(obj)
	def attempt_to_apply_remote_resendRequest(self, obj):
		# Delegate resend requests to the owning buffer.
		return self._buffer._remote_request_resend(obj)
class RemoteServer(object):
	'''RemoteServer, connects to a LocalServer on the side
	of an actual IU owner, which will process any requests.
	The RemoteServer is put on hold while the owner is
	processing. RemoteServer is from RSB terminology,
	it might more aptly be described as an RPC client.'''
	def __init__(self, remote_end_scope, config=None):
		'''Create an RPC client: subscribe to a private reply topic and
		send requests to `remote_end_scope` (the owner's LocalServer).

		Keyword arguments:
		remote_end_scope -- MQTT topic of the remote LocalServer
		config -- ipaaca config object providing transport.mqtt.host / .port
		'''
		self._running = False
		self._live = False
		self._live_event = threading.Event()  # set once the reply-topic subscription is granted
		self._pending_requests_lock = threading.Lock()
		self._pending_requests = {}
		#queue.Queue(_REMOTE_SERVER_MAX_QUEUED_REQUESTS)
		self._uuid = str(uuid.uuid4())[0:8]
		self._name = 'PID_' + str(os.getpid()) + '_RemoteServer_' + self._uuid
		# will RECV here:
		self._scope = '/ipaaca/remotes/' + self._name
		# will SEND here
		self._remote_end_scope = remote_end_scope
		#
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + remote_end_scope
		self._mqtt_client = mqtt.Client(self._client_id)
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = int(config.get_with_default('transport.mqtt.port', 1883, warn=True))
		self._mqtt_client.on_connect = self.mqtt_callback_on_connect
		self._mqtt_client.on_disconnect = self.mqtt_callback_on_disconnect
		self._mqtt_client.on_message = self.mqtt_callback_on_message
		self._mqtt_client.on_subscribe = self.mqtt_callback_on_subscribe
		#self._mqtt_client.on_publish = self.mqtt_callback_on_publish
		self.run_in_background()
	def deactivate(self):
		# No-op; see deactivate_internal(), called from backend teardown.
		pass
	def deactivate_internal(self):
		'''Disconnect from the broker and drop the client reference.'''
		self._mqtt_client.disconnect()
		self._mqtt_client = None
	def run_in_background(self):
		'''Start the paho network loop thread and connect (idempotent).'''
		if not self._running:
			self._running = True
			self._mqtt_client.loop_start()
			self._mqtt_client.connect(self._host, self._port)
	def mqtt_callback_on_connect(self, client, userdata, flags, rc):
		# rc == 0 means success
		if rc > 0:
			LOGGER.warning('MQTT connect failed, result code ' + str(rc))
		else:
			self._mqtt_client.subscribe(self._scope, qos=2)
	def mqtt_callback_on_subscribe(self, client, userdata, mid, granted_qos):
		# TODO should / could track how many / which topics have been granted
		if any(q != 2 for q in granted_qos):
			LOGGER.warning('MQTT subscription did not obtain QoS level 2')
		self._live = True
		self._live_event.set()  # unblocks MQTTBackend.createRemoteServer()
	def mqtt_callback_on_disconnect(self, client, userdata, rc):
		LOGGER.warning('MQTT disconnect for '+str(self._scope)+' with result code '+str(rc))
	def mqtt_callback_on_message(self, client, userdata, message):
		'''Match an incoming RemoteRequestResult to its pending request
		and wake the thread blocked in blocking_call().'''
		reply = ipaaca.converter.deserialize(message.payload)
		if isinstance(reply, ipaaca.ipaaca_pb2.RemoteRequestResult):
			uid = reply.request_uid
			pending_request = None
			with self._pending_requests_lock:
				if uid in self._pending_requests:
					pending_request = self._pending_requests[uid]
					del self._pending_requests[uid]
			if pending_request is None:
				raise RuntimeError('RemoteServer: got a reply for request uid that is not queued: '+str(uid))
			else:
				# provide result to other thread and unblock it
				pending_request.reply_with_result(reply)
		else:
			raise RuntimeError('RemoteServer: got an object of wrong class '+str(reply.__class__.__name__)) # TODO replace
	def queue_pending_request(self, request):
		'''Wrap `request` in a PendingRequest and register it, enforcing
		the optional _REMOTE_SERVER_MAX_QUEUED_REQUESTS limit.'''
		pending_request = PendingRequest(request)
		with self._pending_requests_lock:
			if _REMOTE_SERVER_MAX_QUEUED_REQUESTS>0 and len(self._pending_requests) >= _REMOTE_SERVER_MAX_QUEUED_REQUESTS:
				raise RuntimeError('RemoteServer: maximum number of pending requests exceeded') # TODO replace?
			else:
				self._pending_requests[pending_request._request_uid] = pending_request
		return pending_request
	# impl
	def blocking_call(self, request):
		'''Send `request` to the remote LocalServer and block until its
		int result arrives; returns 0 on timeout.'''
		# Broker's queue will raise before sending anything if capacity is exceeded
		pending_request = self.queue_pending_request(request)
		# complete and send request
		request.request_uid = pending_request._request_uid
		request.request_endpoint = self._scope
		self._mqtt_client.publish(self._remote_end_scope, ipaaca.converter.serialize(request), qos=2)
		# wait for other end to return result
		reply = pending_request.wait_for_reply()
		if reply is None:
			LOGGER.warning('A request timed out!')
			return 0
		else:
			return reply.result # the actual int result
	# glue that quacks like the RSB version
	def resendRequest(self, req):
		return self.blocking_call(req)
	def commit(self, req):
		return self.blocking_call(req)
	def updatePayload(self, req):
		return self.blocking_call(req)
	def updateLinks(self, req):
		return self.blocking_call(req)
class MQTTBackend(object):
	'''ipaaca backend factory using an MQTT broker as transport.'''
	def __init__(self, name='mqtt'):
		# back-end initialization code
		self._config = ipaaca.config.get_global_config()
		self._name = name
		self._participants = set()  # every created endpoint, for teardown()
	def _get_name(self):
		return self._name
	name = property(_get_name)
	def teardown(self):
		'''Deactivate every participant created by this backend.'''
		LOGGER.info('MQTT teardown: waiting 1 sec for final deliveries')
		time.sleep(1)
		for p in self._participants:
			p.deactivate_internal()
	def Scope(self, scope_str):
		'''Scope adapter (glue replacing rsb.Scope)'''
		return str(scope_str)
	def createLocalServer(self, buffer_impl, scope, config=None):
		'''Create an RPC server for `buffer_impl` on `scope`; blocks up
		to 30 s until its subscription is live.'''
		LOGGER.debug('Creating a LocalServer on '+str(scope))
		s = LocalServer(buffer_impl, scope, self._config if config is None else config)
		self._participants.add(s)
		s._live_event.wait(30.0)
		return s
	def createRemoteServer(self, scope, config=None):
		'''Create an RPC client for the LocalServer at `scope`; blocks up
		to 30 s until its reply subscription is live.'''
		LOGGER.debug('Creating a RemoteServer on '+str(scope))
		s = RemoteServer(scope, self._config if config is None else config)
		self._participants.add(s)
		s._live_event.wait(30.0)
		return s
	def createInformer(self, scope, config=None, dataType="ignored in this backend"):
		'''Create a publisher on `scope`; blocks up to 30 s until connected.'''
		LOGGER.debug('Creating an Informer on '+str(scope))
		s = Informer(scope, self._config if config is None else config)
		self._participants.add(s)
		s._live_event.wait(30.0)
		return s
	def createListener(self, scope, config=None):
		'''Create a subscriber on `scope`; blocks up to 30 s until its subscription is live.'''
		LOGGER.debug('Creating a Listener on '+str(scope))
		s = Listener(scope, self._config if config is None else config)
		self._participants.add(s)
		s._live_event.wait(30.0)
		return s
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import collections
import sys
import ipaaca.ipaaca_pb2
import ipaaca.defaults
import ipaaca.exception
import ipaaca.iu
import ipaaca.misc
import ipaaca.converter
import ipaaca.backend
import ipaaca.config as config
LOGGER = ipaaca.misc.get_library_logger()
ROS_ENABLED, __try_guessing = False, False

try:
	import rospy
	from std_msgs.msg import String
	import base64
	ROS_ENABLED = True
except ImportError:
	# Narrowed from a bare `except:`, which would also have swallowed
	# KeyboardInterrupt/SystemExit during import.
	LOGGER.debug('rospy or deps not found, ROS backend disabled')
	ROS_ENABLED = False

if not ROS_ENABLED:
	def create_backend():
		'''Factory: ROS support is unavailable, report "no backend".'''
		return None
else:
	def create_backend():
		'''Factory: build the ROS transport backend.'''
		return ROSBackend(name='ros')
import threading
try:
import queue
except:
import Queue as queue
import uuid
import os
import time
import sys
class EventWrapper(object):
	'''Minimal envelope that gives a deserialized payload the `.data`
	attribute handler code expects from an event object.'''
	def __init__(self, data):
		self.data = data
class PendingRequest(object):
	'''Encapsulation of a pending remote request with
	a facility to keep the requesting thread locked
	until the reply or a timeout unlocks it.'''

	def __init__(self, request):
		self._request = request
		self._reply = None
		self._event = threading.Event()
		# short uid used to correlate request and reply messages
		self._request_uid = str(uuid.uuid4())[0:8]

	def wait_for_reply(self, timeout=30.0):
		'''Block until a reply arrives or `timeout` seconds elapse;
		return the reply object, or None on timeout.'''
		if self._event.wait(timeout):
			return self._reply
		return None

	def reply_with_result(self, reply):
		'''Deposit `reply` and wake the thread blocked in wait_for_reply().'''
		self._reply = reply
		self._event.set()
class Informer(object):
	'''Informer interface, wrapping an outbound port to ROS'''
	def __init__(self, scope, config=None):
		'''Create a latched std_msgs/String publisher on the ROS topic `scope`.

		Keyword arguments:
		scope -- ROS topic name to publish on
		config -- ipaaca config object
		'''
		self._scope = scope
		self._running = False
		self._live = False
		self._live_event = threading.Event()
		self._handlers = []
		#
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + scope
		self._ros_pub = rospy.Publisher(self._scope, String, queue_size=100, tcp_nodelay=True, latch=True)
		# NOTE(review): these read transport.mqtt.* keys although this is
		# the ROS backend, and the values are not used in this class.
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = config.get_with_default('transport.mqtt.port', 1883, warn=True)
	def deactivate(self):
		# No-op; explicit unregister is currently disabled.
		pass
		#self._ros_pub.unregister()
		#self._ros_pub = None
	def publishData(self, data):
		'''Serialize `data` (base64 text) and publish it as a std_msgs/String.'''
		self._ros_pub.publish(ROSBackend.serialize(data))
class BackgroundEventDispatcher(threading.Thread):
	'''One-shot daemon thread delivering a single event to a list of handlers.'''

	def __init__(self, event, handlers):
		super(BackgroundEventDispatcher, self).__init__()
		self.daemon = True  # do not keep the interpreter alive
		self._event = event
		self._handlers = handlers

	def run(self):
		for deliver in self._handlers:
			deliver(self._event)
class Listener(object):
	'''Listener interface, wrapping an inbound port from ROS'''
	def __init__(self, scope, config=None):
		'''Subscribe to the ROS topic `scope` (std_msgs/String payloads).

		Keyword arguments:
		scope -- ROS topic name
		config -- ipaaca config object
		'''
		self._scope = scope
		self._running = False
		self._live = False
		self._live_event = threading.Event()
		self._handlers = []
		#
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + scope
		self._ros_sub = rospy.Subscriber(self._scope, String, self.on_message, tcp_nodelay=True)
		# NOTE(review): transport.mqtt.* keys are read but unused in the ROS backend.
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = config.get_with_default('transport.mqtt.port', 1883, warn=True)
	def deactivate(self):
		# No-op; explicit unregister is currently disabled.
		pass
		#self._ros_sub.unregister()
		#self._ros_sub = None
	def on_message(self, message):
		'''rospy callback: deserialize the payload and deliver it to all
		registered handlers (synchronously, in the rospy callback thread).'''
		event = EventWrapper(ROSBackend.deserialize(message.data))
		## (1) with extra thread:
		#dispatcher = BackgroundEventDispatcher(event, self._handlers)
		#dispatcher.start()
		## or (2) with no extra thread:
		for handler in self._handlers:
			handler(event)
	def addHandler(self, handler):
		'''Register a callback invoked with an EventWrapper for each incoming message.'''
		self._handlers.append(handler)
class LocalServer(object):
	'''LocalServer interface, allowing for RPC requests to
	IU functions, or reporting back success or failure.'''
	def __init__(self, buffer_impl, scope, config=None):
		'''Subscribe to `scope` for RPC requests targeting IUs owned by
		`buffer_impl`; replies are published on per-requester topics.

		Keyword arguments:
		buffer_impl -- the buffer implementation owning the IUs
		scope -- ROS topic on which requests arrive
		config -- ipaaca config object
		'''
		self._buffer = buffer_impl
		self._scope = scope
		self._running = False
		self._live = False
		self._live_event = threading.Event()
		self._pending_requests_lock = threading.Lock()
		self._pending_requests = {}
		self._uuid = str(uuid.uuid4())[0:8]
		self._name = 'PID_' + str(os.getpid()) + '_LocalServer_' + self._uuid # unused atm
		#
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + scope
		self._ros_pubs = {}  # reply publishers, keyed by requester endpoint
		self._ros_sub = rospy.Subscriber(self._scope, String, self.on_message, tcp_nodelay=True)
		# NOTE(review): transport.mqtt.* keys are read but unused in the ROS backend.
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = config.get_with_default('transport.mqtt.port', 1883, warn=True)
	def get_publisher(self, endpoint):
		'''Return the reply publisher for `endpoint` (created and cached on first use).'''
		if endpoint in self._ros_pubs:
			return self._ros_pubs[endpoint]
		else:
			p = rospy.Publisher(endpoint, String, queue_size=10, tcp_nodelay=True, latch=True)
			self._ros_pubs[endpoint] = p
			return p
	def deactivate(self):
		# No-op; explicit unregister is currently disabled.
		pass
		#self._ros_sub.unregister()
		#for v in self._ros_pubs.values():
		#	v.unregister()
		#self._ros_sub = None
		#self._ros_pubs = {}
	def on_message(self, message):
		'''rospy callback: dispatch an RPC request to the owning buffer
		and send the (int) result back to the requester, if any.'''
		req = ROSBackend.deserialize(message.data)
		result = None
		if isinstance(req, ipaaca.converter.IUPayloadUpdate):
			result = self.attempt_to_apply_remote_updatePayload(req)
		elif isinstance(req, ipaaca.converter.IULinkUpdate):
			result = self.attempt_to_apply_remote_updateLinks(req)
		elif isinstance(req, ipaaca.ipaaca_pb2.IUCommission):
			result = self.attempt_to_apply_remote_commit(req)
		elif isinstance(req, ipaaca.ipaaca_pb2.IUResendRequest):
			result = self.attempt_to_apply_remote_resendRequest(req)
		else:
			raise RuntimeError('LocalServer: got an object of wrong class '+str(req.__class__.__name__)) # TODO replace
		if result is not None:
			self.send_result_for_request(req, result)
	#
	def send_result_for_request(self, obj, result):
		'''Publish a RemoteRequestResult carrying `result` to the request's reply endpoint.'''
		pbo = ipaaca.ipaaca_pb2.RemoteRequestResult()
		pbo.result = result
		pbo.request_uid = obj.request_uid
		#print('Sending result to endpoint '+str(obj.request_endpoint))
		pub = self.get_publisher(obj.request_endpoint)
		pub.publish(ROSBackend.serialize(pbo))
	def attempt_to_apply_remote_updateLinks(self, obj):
		# Delegate link-update requests to the owning buffer.
		return self._buffer._remote_update_links(obj)
	def attempt_to_apply_remote_updatePayload(self, obj):
		# Delegate payload-update requests to the owning buffer.
		return self._buffer._remote_update_payload(obj)
	def attempt_to_apply_remote_commit(self, obj):
		# Delegate commit requests to the owning buffer.
		return self._buffer._remote_commit(obj)
	def attempt_to_apply_remote_resendRequest(self, obj):
		# Delegate resend requests to the owning buffer.
		return self._buffer._remote_request_resend(obj)
_REMOTE_SERVER_MAX_QUEUED_REQUESTS = -1 # unlimited
class RemoteServer(object):
	'''RemoteServer, connects to a LocalServer on the side
	of an actual IU owner, which will process any requests.
	The RemoteServer is put on hold while the owner is
	processing. RemoteServer is from RSB terminology,
	it might more aptly be described as an RPC client.'''
	def __init__(self, remote_end_scope, config=None):
		'''Create an RPC client: publish requests to `remote_end_scope`
		(the owner's LocalServer) and receive replies on a private topic.

		Keyword arguments:
		remote_end_scope -- ROS topic of the remote LocalServer
		config -- ipaaca config object
		'''
		self._running = False
		self._live = False
		self._live_event = threading.Event()
		self._pending_requests_lock = threading.Lock()
		self._pending_requests = {}
		#queue.Queue(_REMOTE_SERVER_MAX_QUEUED_REQUESTS)
		self._uuid = str(uuid.uuid4())[0:8]
		self._name = 'PID_' + str(os.getpid()) + '_RemoteServer_' + self._uuid
		# will RECV here:
		self._scope = '/ipaaca/remotes/' + self._name
		# will SEND here
		self._remote_end_scope = remote_end_scope
		#
		self._client_id = '%s.%s_%s'%(self.__module__, self.__class__.__name__, str(uuid.uuid4())[0:8])
		self._client_id += '_' + remote_end_scope
		self._ros_pub = rospy.Publisher(self._remote_end_scope, String, queue_size=10, tcp_nodelay=True, latch=True)
		self._ros_sub = rospy.Subscriber(self._scope, String, self.on_message, tcp_nodelay=True)
		# NOTE(review): transport.mqtt.* keys are read but unused in the ROS backend.
		self._host = config.get_with_default('transport.mqtt.host', 'localhost', warn=True)
		self._port = config.get_with_default('transport.mqtt.port', 1883, warn=True)
	def deactivate(self):
		# No-op; explicit unregister is currently disabled.
		pass
		#self._ros_sub.unregister()
		#self._ros_pub.unregister()
		#self._ros_sub = None
		#self._ros_pub = None
	def on_message(self, message):
		'''rospy callback: match an incoming RemoteRequestResult to its
		pending request and wake the thread blocked in blocking_call().'''
		reply = ROSBackend.deserialize(message.data)
		if isinstance(reply, ipaaca.ipaaca_pb2.RemoteRequestResult):
			uid = reply.request_uid
			pending_request = None
			with self._pending_requests_lock:
				if uid in self._pending_requests:
					pending_request = self._pending_requests[uid]
					del self._pending_requests[uid]
			if pending_request is None:
				raise RuntimeError('RemoteServer: got a reply for request uid that is not queued: '+str(uid))
			else:
				# provide result to other thread and unblock it
				pending_request.reply_with_result(reply)
		else:
			raise RuntimeError('RemoteServer: got an object of wrong class '+str(reply.__class__.__name__)) # TODO replace
	def queue_pending_request(self, request):
		'''Wrap `request` in a PendingRequest and register it, enforcing
		the optional _REMOTE_SERVER_MAX_QUEUED_REQUESTS limit.'''
		pending_request = PendingRequest(request)
		with self._pending_requests_lock:
			if _REMOTE_SERVER_MAX_QUEUED_REQUESTS>0 and len(self._pending_requests) >= _REMOTE_SERVER_MAX_QUEUED_REQUESTS:
				raise RuntimeError('RemoteServer: maximum number of pending requests exceeded') # TODO replace?
			else:
				self._pending_requests[pending_request._request_uid] = pending_request
		return pending_request
	# impl
	def blocking_call(self, request):
		'''Send `request` to the remote LocalServer and block until its
		int result arrives; returns 0 on timeout.'''
		# Broker's queue will raise before sending anything if capacity is exceeded
		pending_request = self.queue_pending_request(request)
		# complete and send request
		request.request_uid = pending_request._request_uid
		request.request_endpoint = self._scope
		self._ros_pub.publish(ROSBackend.serialize(request))
		# wait for other end to return result
		reply = pending_request.wait_for_reply()
		if reply is None:
			LOGGER.warning('A request timed out!')
			return 0
		else:
			return reply.result # the actual int result
	# glue that quacks like the RSB version
	def resendRequest(self, req):
		return self.blocking_call(req)
	def commit(self, req):
		return self.blocking_call(req)
	def updatePayload(self, req):
		return self.blocking_call(req)
	def updateLinks(self, req):
		return self.blocking_call(req)
class ROSBackend(object):
	'''ipaaca backend that transports serialized ipaaca messages as
	base64-encoded text in std_msgs/String payloads over ROS topics.'''
	def __init__(self, name='ros'):
		#import logging
		# Back-end initialization code; the rospy node itself is created
		# lazily in init_once() so that constructing the backend object
		# has no ROS side effects.
		self._name = name
		self._need_init = True
		#logging.basicConfig(level=logging.DEBUG)
	def init_once(self):
		'''Actual back-end initialization is only done when it is used'''
		if self._need_init:
			self._need_init = False
			self._config = config.get_global_config()
			try:
				# generate a ROS node prefix from the basename of argv[0]
				clean_name = ''.join([c for c in sys.argv[0].rsplit('/',1)[-1].replace('.', '_').replace('-','_') if c.lower() in 'abcdefghijklmnoprqstuvwxzy0123456789_'])
			except Exception:
				# narrowed from a bare `except:`
				clean_name = ''
			rospy.init_node('ipaaca_python' if len(clean_name)==0 else clean_name,
					anonymous=True, disable_signals=True)
	def _get_name(self):
		return self._name
	name = property(_get_name)
	def teardown(self):
		'''Shut down the rospy node after a grace period for final deliveries.'''
		LOGGER.info('ROS teardown: waiting 1 sec for final deliveries')
		time.sleep(1)
		rospy.signal_shutdown('Done')
	@staticmethod
	def serialize(obj):
		'''Serialize an ipaaca object into a base64 text payload.'''
		#print('object class: '+obj.__class__.__name__)
		bb = ipaaca.converter.serialize(obj)
		# BUG FIX: str(base64.b64encode(bb)) yields "b'...'" under
		# Python 3; since 'b' is itself a valid base64 alphabet character
		# it would corrupt decoding. Decode the bytes to text explicitly.
		return base64.b64encode(bb).decode('ascii')
	@staticmethod
	def deserialize(msg):
		'''Decode a base64 text payload back into an ipaaca object.'''
		#print('got serialized: '+str(msg))
		bb = base64.b64decode(msg)
		return ipaaca.converter.deserialize(bb)
	def Scope(self, scope_str):
		'''Scope adapter (glue replacing rsb.Scope)'''
		# ROS graph resources must not start with a slash
		return str(scope_str)[1:] if scope_str.startswith('/') else str(scope_str)
	def createLocalServer(self, buffer_impl, scope, config=None):
		'''Create an RPC server for `buffer_impl` listening on `scope`.'''
		self.init_once()
		LOGGER.debug('Creating a LocalServer on '+str(scope))
		LOGGER.debug(' from thread '+threading.current_thread().name)
		s = LocalServer(buffer_impl, scope, self._config if config is None else config)
		#s._live_event.wait(30.0)
		return s
	def createRemoteServer(self, scope, config=None):
		'''Create an RPC client for the LocalServer at `scope`.'''
		self.init_once()
		LOGGER.debug('Creating a RemoteServer on '+str(scope))
		LOGGER.debug(' from thread '+threading.current_thread().name)
		s = RemoteServer(scope, self._config if config is None else config)
		#s._live_event.wait(30.0)
		return s
	def createInformer(self, scope, config=None, dataType="ignored in this backend"):
		'''Create a publisher on `scope`.'''
		self.init_once()
		LOGGER.debug('Creating an Informer on '+str(scope))
		LOGGER.debug(' from thread '+threading.current_thread().name)
		s = Informer(scope, self._config if config is None else config)
		#s._live_event.wait(30.0)
		return s
	def createListener(self, scope, config=None):
		'''Create a subscriber on `scope`.'''
		self.init_once()
		LOGGER.debug('Creating a Listener on '+str(scope))
		LOGGER.debug(' from thread '+threading.current_thread().name)
		s = Listener(scope, self._config if config is None else config)
		#s._live_event.wait(30.0)
		return s
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import atexit
import functools
import threading
import traceback
import uuid
import weakref

import six

#import rsb

import ipaaca.backend
import ipaaca.converter
import ipaaca.defaults
import ipaaca.exception
import ipaaca.ipaaca_pb2
import ipaaca.iu
import ipaaca.misc
__all__ = [
'InputBuffer',
'OutputBuffer',
]
LOGGER = ipaaca.misc.get_library_logger()
# set of objects to auto-clean on exit, assumes _teardown() method
TEARDOWN_OBJECTS = set()
def atexit_cleanup_function():
	'''Function to call at program exit to auto-clean objects.'''
	global TEARDOWN_OBJECTS
	for obj_r in TEARDOWN_OBJECTS:
		obj = obj_r()  # dereference the weak reference
		if obj is not None: # if weakref still valid
			obj._teardown()
	# finally shut down the active transport backend itself
	ipaaca.backend.get_default_backend().teardown()
atexit.register(atexit_cleanup_function)
def auto_teardown_instances(fn):
	'''Decorator function for object constructors: registers each new
	instance (as a weak reference) in TEARDOWN_OBJECTS so that its
	_teardown() is invoked by the atexit cleanup handler.'''
	@functools.wraps(fn)  # preserve name/docstring of the wrapped __init__
	def auto_teardown_instances_wrapper(instance, *args, **kwargs):
		global TEARDOWN_OBJECTS
		fn(instance, *args, **kwargs)
		TEARDOWN_OBJECTS.add(weakref.ref(instance))
	return auto_teardown_instances_wrapper
class IUStore(dict):
	"""A dictionary storing IUs."""

	def __init__(self):
		super(IUStore, self).__init__()
class FrozenIUStore(IUStore):
	"""A read-only snapshot of an IUStore.

	All entries of `original_iu_store` are copied at construction time;
	any later mutation via item assignment or deletion raises
	AttributeError.
	"""
	def __init__(self, original_iu_store):
		super(FrozenIUStore, self).__init__()
		# BUG FIX: the previous implementation used map() over a lambda,
		# which is lazy under Python 3 and therefore never executed the
		# copy -- every FrozenIUStore ended up empty.
		for uid, iu in original_iu_store.items():
			super(FrozenIUStore, self).__setitem__(uid, iu)
	def __delitem__(self, k):
		# read-only: deletion is forbidden
		raise AttributeError()
	def __setitem__(self, k, v):
		# read-only: assignment is forbidden
		raise AttributeError()
class IUEventHandler(object):
	"""Wrapper around an IU event handling function together with the
	event-type and category filters that decide when it fires."""

	def __init__(self, handler_function, for_event_types=None, for_categories=None):
		"""Create an IUEventHandler.

		Keyword arguments:
		handler_function -- the handler function with the signature
			(IU, event_type, local)
		for_event_types -- a list of event types or None if handler should
			be called for all event types
		for_categories -- a list of category names or None if handler should
			be called for all categories
		"""
		super(IUEventHandler, self).__init__()
		self._handler_function = handler_function
		self._for_event_types = self._normalize_filter(for_event_types)
		self._for_categories = self._normalize_filter(for_categories)

	@staticmethod
	def _normalize_filter(spec):
		# None -> no filtering; a single string (or other non-iterable)
		# -> one-element list; any other iterable -> shallow copy.
		if spec is None:
			return None
		if not isinstance(spec, six.string_types) and hasattr(spec, '__iter__'):
			return spec[:]
		return [spec]

	def condition_met(self, event_type, category):
		"""Check whether this IUEventHandler should be called.

		Keyword arguments:
		event_type -- type of the IU event
		category -- category of the IU which triggered the event
		"""
		if self._for_event_types is not None and event_type not in self._for_event_types:
			return False
		if self._for_categories is not None and category not in self._for_categories:
			return False
		return True

	def call(self, buffer, iu_uid, local, event_type, category):
		"""Call this IUEventHandler's function, if it applies.

		Keyword arguments:
		buffer -- the buffer in which the IU is stored
		iu_uid -- the uid of the IU
		local -- flag passed through to the handler (whether the IU is
			local to this component's buffer)
		event_type -- IU event type
		category -- category of the IU
		"""
		if not self.condition_met(event_type, category):
			return
		iu = buffer._iu_store[iu_uid]
		self._handler_function(iu, event_type, local)
class Buffer(object):
	"""Base class for InputBuffer and OutputBuffer."""

	def __init__(self, owning_component_name, channel=None, participant_config=None):
		'''Create a Buffer.

		Keyword arguments:
		owning_component_name -- name of the entity that owns this Buffer
		channel -- name of the IPAACA channel to use (default channel if None)
		participant_config -- transport participant configuration
		'''
		super(Buffer, self).__init__()
		ipaaca.initialize_ipaaca_rsb_if_needed()
		self._owning_component_name = owning_component_name
		self._channel = channel if channel is not None else ipaaca.defaults.IPAACA_DEFAULT_CHANNEL
		self._participant_config = participant_config
		self._uuid = str(uuid.uuid4())[0:8]
		# Initialise with a temporary, but already unique, name
		self._unique_name = "undef-"+self._uuid
		self._iu_store = IUStore()
		self._iu_event_handlers = []

	def _get_frozen_iu_store(self):
		return FrozenIUStore(original_iu_store = self._iu_store)
	iu_store = property(fget=_get_frozen_iu_store, doc='Copy-on-read version of the internal IU store')

	def _get_channel(self):
		return self._channel
	channel = property(
			fget=_get_channel,
			doc='The IPAACA channel the buffer is connected to.')

	def register_handler(self, handler_function, for_event_types=None, for_categories=None):
		"""Register a new IU event handler function.

		Keyword arguments:
		handler_function -- a function with the signature (IU, event_type, local)
		for_event_types -- a list of event types or None if handler should
			be called for all event types
		for_categories -- a list of category names or None if handler should
			be called for all categories
		"""
		if handler_function in [h._handler_function for h in self._iu_event_handlers]:
			# FIX: use warning() (warn() is a deprecated alias) and repair
			# the previously mismatched quote characters in the message.
			LOGGER.warning("The handler function '" + handler_function.__name__ + "' has been registered before.")
		handler = IUEventHandler(handler_function=handler_function, for_event_types=for_event_types, for_categories=for_categories)
		self._iu_event_handlers.append(handler)
		return handler

	def call_iu_event_handlers(self, uid, local, event_type, category):
		"""Call registered IU event handler functions registered for this event_type and category."""
		for h in self._iu_event_handlers:
			try:
				h.call(self, uid, local=local, event_type=event_type, category=category)
			except Exception as e:
				if local:
					LOGGER.error('Local IU handler raised an exception upon remote write.' + str(e))
				else:
					print(str(traceback.format_exc()))
					raise e

	def _get_owning_component_name(self):
		"""Return the name of this Buffer's owning component"""
		return self._owning_component_name
	owning_component_name = property(_get_owning_component_name)

	def _get_unique_name(self):
		"""Return the Buffer's unique name."""
		return self._unique_name
	unique_name = property(_get_unique_name)
class InputBuffer(Buffer):
"""An InputBuffer that holds remote IUs."""
	@auto_teardown_instances
	def __init__(self, owning_component_name, category_interests=None, channel=None, participant_config=None, resend_active=False):
		'''Create an InputBuffer.

		Keyword arguments:
		owning_component_name -- name of the entity that owns this InputBuffer
		category_interests -- list of IU categories this Buffer is interested in
		channel -- name of the IPAACA channel to use (default channel if None)
		participant_config -- transport participant configuration
		resend_active -- flag stored as _resend_active enabling resend-request behavior
		'''
		super(InputBuffer, self).__init__(owning_component_name, channel, participant_config)
		self._unique_name = '/ipaaca/component/'+str(owning_component_name)+'ID'+self._uuid+'/IB'
		self._resend_active = resend_active
		self._listener_store = {} # one per IU category
		self._remote_server_store = {} # one per remote-IU-owning Component
		self._category_interests = []
		# add own uuid as identifier for hidden category.
		self._add_category_listener(str(self._uuid))
		if category_interests is not None:
			self.add_category_interests(category_interests)
def _get_remote_server(self, event_or_iu):
'''Return (or create, store and return) a remote server.'''
_remote_server_name = self._get_owner(event_or_iu) + '/Server'
if _remote_server_name:
try:
return self._remote_server_store[_remote_server_name]
except KeyError:
be = ipaaca.backend.get_default_backend()
remote_server = be.createRemoteServer(be.Scope(str(_remote_server_name)), config=self._participant_config)
self._remote_server_store[_remote_server_name] = remote_server
return remote_server
else:
None
def _get_owner(self, event_or_iu):
if hasattr(event_or_iu, 'data'):
# is RSB event
data = event_or_iu.data
if hasattr(data, 'owner_name'):
return data.owner_name
elif hasattr(data, 'writer_name'):
return data.writer_name
else:
return None
else:
# is IU
return event_or_iu.owner_name
def _add_category_listener(self, iu_category):
'''Create and store a listener on a specific category.'''
if iu_category not in self._listener_store:
be = ipaaca.backend.get_default_backend()
cat_listener = be.createListener(be.Scope("/ipaaca/channel/"+str(self._channel)+"/category/"+str(iu_category)), config=self._participant_config)
cat_listener.addHandler(self._handle_iu_events)
self._listener_store[iu_category] = cat_listener
self._category_interests.append(iu_category)
LOGGER.info("Added listener in scope /ipaaca/channel/" + str(self._channel) + "/category/" + iu_category)
def _remove_category_listener(self, iu_category):
'''Remove the listener for a specific category.'''
if iu_category in self._listener_store and iu_category in self._category_interests:
del self._listener_store[iu_category]
self._category_interests.remove(iu_category)
LOGGER.info("Removed listener in scope /ipaaca/channel/" + str(self._channel) + "/category/ " + iu_category)
def _teardown(self):
'''OutputBuffer retracts remaining live IUs on teardown'''
self._deactivate_all_internal()
def __del__(self):
'''Perform teardown as soon as Buffer is lost.'''
self._deactivate_all_internal()
def _deactivate_all_internal(self):
'''Deactivate all participants.'''
for listener in self._listener_store.values():
try:
listener.deactivate()
except RuntimeError:
# Is raised if an already deactivated participant is
# deactivated again
pass
def _handle_iu_events(self, event):
'''Dispatch incoming IU events.
Adds incoming IU's to the store, applies payload and commit updates to
IU, calls IU event handlers.'
Keyword arguments:
event -- a converted RSB event
'''
type_ = type(event.data)
if type_ == ipaaca.iu.RemotePushIU:
# a new IU
if event.data.uid not in self._iu_store:
self._iu_store[event.data.uid] = event.data
event.data.buffer = self
self.call_iu_event_handlers(event.data.uid, local=False, event_type=ipaaca.iu.IUEventType.ADDED, category=event.data.category)
else:
# IU already in our store, overwrite local IU, but do not call
# event handler. This functionality is necessary to undo
# destructive changes after a failing remote updates (undo is
# done via the resend request mechanism).
self._iu_store[event.data.uid] = event.data
event.data.buffer = self
elif type_ == ipaaca.iu.RemoteMessage:
# a new Message, an ephemeral IU that is removed after calling handlers
self._iu_store[ event.data.uid ] = event.data
event.data.buffer = self
self.call_iu_event_handlers(event.data.uid, local=False, event_type=ipaaca.iu.IUEventType.MESSAGE, category=event.data.category)
del self._iu_store[ event.data.uid ]
else:
if event.data.uid not in self._iu_store:
if (self._resend_active and
not type_ == ipaaca.ipaaca_pb2.IURetraction):
# send resend request to remote server, IURetraction is ignored
try:
self._request_remote_resend(event)
except ipaaca.exception.IUResendRequestFailedError:
LOGGER.warning('Requesting resend for IU {} failed.'.
format(event.data.uid))
else:
LOGGER.warning("Received an update for an IU which we did not receive before.")
return
# an update to an existing IU
if type_ == ipaaca.ipaaca_pb2.IURetraction:
# IU retraction (cannot be triggered remotely)
iu = self._iu_store[event.data.uid]
iu._revision = event.data.revision
iu._apply_retraction() # for now - just sets the _rectracted flag.
self.call_iu_event_handlers(event.data.uid, local=False, event_type=ipaaca.iu.IUEventType.RETRACTED, category=iu.category)
else:
if event.data.writer_name == self.unique_name:
# Notify only for remotely triggered events;
# Discard updates that originate from this buffer
return
if type_ == ipaaca.ipaaca_pb2.IUCommission:
# IU commit
iu = self._iu_store[event.data.uid]
iu._apply_commission()
iu._revision = event.data.revision
self.call_iu_event_handlers(event.data.uid, local=False, event_type=ipaaca.iu.IUEventType.COMMITTED, category=iu.category)
elif type_ == ipaaca.converter.IUPayloadUpdate:
# IU payload update
iu = self._iu_store[event.data.uid]
iu._apply_update(event.data)
self.call_iu_event_handlers(event.data.uid, local=False, event_type=ipaaca.iu.IUEventType.UPDATED, category=iu.category)
elif type_ == ipaaca.converter.IULinkUpdate:
# IU link update
iu = self._iu_store[event.data.uid]
iu._apply_link_update(event.data)
self.call_iu_event_handlers(event.data.uid, local=False, event_type=ipaaca.iu.IUEventType.LINKSUPDATED, category=iu.category)
else:
LOGGER.warning('Warning: _handle_iu_events failed to handle an object of type '+str(type_))
def add_category_interests(self, category_interests):
if not isinstance(category_interests, six.string_types) and hasattr(category_interests, '__iter__'):
for interest in category_interests:
self._add_category_listener(interest)
else:
self._add_category_listener(category_interests)
def remove_category_interests(self, category_interests):
if not isinstance(category_interests, six.string_types) and hasattr(category_interests, '__iter__'):
for interest in category_interests:
self._remove_category_listener(interest)
else:
self._remove_category_listener(category_interests)
def _request_remote_resend(self, event):
remote_server = self._get_remote_server(event)
if remote_server:
resend_request = ipaaca.ipaaca_pb2.IUResendRequest()
resend_request.uid = event.data.uid # target iu
resend_request.hidden_scope_name = str(self._uuid) # hidden category name
remote_revision = remote_server.resendRequest(resend_request)
if remote_revision == 0:
raise ipaaca.exception.IUResendRequestFailedError(event.data.uid)
else:
# Remote server is not known
raise ipaaca.exception.IUResendRequestRemoteServerUnknownError(event.data.uid)
def register_handler(self, handler_function, for_event_types=None, for_categories=None):
"""Register a new IU event handler function.
Keyword arguments:
handler_function -- a function with the signature (IU, event_type, local)
for_event_types -- a list of event types or None if handler should
be called for all event types
for_categories -- a list of category names or None if handler should
be called for all categories
"""
handler = super(InputBuffer, self).register_handler(handler_function, for_event_types, for_categories)
try:
for category in handler._for_categories:
self.add_category_interests(category)
except TypeError:
# i.e., None was provided to the handler
pass
return handler
def is_resend_active(self):
return self._resend_active
def set_resend_active(self, active=True):
self._resend_active = active
class OutputBuffer(Buffer):
	"""An OutputBuffer that holds local IUs.

	Locally created IUs are published through this buffer. Remote write
	requests (payload updates, link updates, commits, resend requests)
	arrive via the buffer's local server and are applied to the stored
	IUs by the _remote_* methods, which return the IU's new revision
	number on success and 0 on failure.
	"""
	@auto_teardown_instances
	def __init__(self, owning_component_name, channel=None, participant_config=None):
		'''Create an OutputBuffer.

		Keyword arguments:
		owning_component_name -- name of the entity that own this buffer
		channel -- name of the ipaaca channel to connect to
		participant_config -- backend participant configuration
		'''
		super(OutputBuffer, self).__init__(owning_component_name, channel, participant_config)
		self._unique_name = '/ipaaca/component/' + str(owning_component_name) + 'ID' + self._uuid + '/OB'
		be = ipaaca.backend.get_default_backend()
		# local server that receives remote write requests for our IUs
		self._server = be.createLocalServer(self, be.Scope(self._unique_name + '/Server'), config=self._participant_config)
		self._informer_store = {}
		self._id_prefix = str(owning_component_name)+'-'+str(self._uuid)+'-IU-'
		self.__iu_id_counter_lock = threading.Lock()
	def _teardown(self):
		'''OutputBuffer retracts remaining live IUs on teardown'''
		self._retract_all_internal()
		self._deactivate_all_internal()
	def __del__(self):
		'''Perform teardown (IU retractions) as soon as Buffer is lost.
		Note that at program exit the teardown might be called
		twice for live objects (atexit, then del), but the
		_retract_all_internal method prevents double retractions.'''
		self._retract_all_internal()
		self._deactivate_all_internal()
	def _remote_update_links(self, update):
		'''Apply a remotely requested update to one of the stored IU's links.

		Returns the IU's new revision number, or 0 on failure (unknown IU
		or out-of-date request revision).'''
		if update.uid not in self._iu_store:
			LOGGER.warning("Remote InBuffer tried to spuriously write non-existent IU "+str(update.uid))
			return 0
		iu = self._iu_store[update.uid]
		with iu.revision_lock:
			if (update.revision != 0) and (update.revision != iu.revision):
				# (0 means "do not pay attention to the revision number" -> "force update")
				LOGGER.warning("Remote write operation failed because request was out of date; IU "+str(update.uid))
				return 0
			if update.is_delta:
				iu.modify_links(add=update.new_links, remove=update.links_to_remove, writer_name=update.writer_name)
			else:
				iu.set_links(links=update.new_links, writer_name=update.writer_name)
			self.call_iu_event_handlers(update.uid, local=True, event_type=ipaaca.iu.IUEventType.LINKSUPDATED, category=iu.category)
			return iu.revision
	def _remote_update_payload(self, update):
		'''Apply a remotely requested update to one of the stored IU's payload.

		Returns the IU's new revision number, or 0 on failure (unknown IU
		or out-of-date request revision).'''
		if update.uid not in self._iu_store:
			LOGGER.warning("Remote InBuffer tried to spuriously write non-existent IU "+str(update.uid))
			return 0
		iu = self._iu_store[update.uid]
		with iu.revision_lock:
			if (update.revision != 0) and (update.revision != iu.revision):
				# (0 means "do not pay attention to the revision number" -> "force update")
				LOGGER.warning("Remote update_payload operation failed because request was out of date; IU "+str(update.uid))
				LOGGER.warning("  Writer was: "+update.writer_name)
				LOGGER.warning("  Requested update was: (New keys:) "+','.join(update.new_items.keys())+' (Removed keys:) '+','.join(update.keys_to_remove))
				LOGGER.warning("  Referred-to revision was "+str(update.revision)+' while local revision is '+str(iu.revision))
				return 0
			if update.is_delta:
				#print('Writing delta update by '+str(update.writer_name))
				with iu.payload:
					for k in update.keys_to_remove:
						iu.payload.__delitem__(k, writer_name=update.writer_name)
					for k,v in update.new_items.items():
						iu.payload.__setitem__(k, v, writer_name=update.writer_name)
			else:
				#print('Writing non-incr update by '+str(update.writer_name))
				iu._set_payload(update.new_items, writer_name=update.writer_name)
				# _set_payload etc. have also incremented the revision number
			self.call_iu_event_handlers(update.uid, local=True, event_type=ipaaca.iu.IUEventType.UPDATED, category=iu.category)
			return iu.revision
	def _remote_request_resend(self, iu_resend_request_pack):
		''' Resend a requested IU over the specific hidden category.

		Returns the IU's revision number, or 0 on failure (unknown IU or
		missing hidden scope name in the request).'''
		if iu_resend_request_pack.uid not in self._iu_store:
			LOGGER.warning("Remote side requested resending of non-existent IU "+str(iu_resend_request_pack.uid))
			return 0
		iu = self._iu_store[iu_resend_request_pack.uid]
		with iu.revision_lock:
			if iu_resend_request_pack.hidden_scope_name is not None and iu_resend_request_pack.hidden_scope_name != '':
				informer = self._get_informer(iu_resend_request_pack.hidden_scope_name)
				informer.publishData(iu)
				return iu.revision
			else:
				return 0
	def _remote_commit(self, iu_commission):
		'''Apply a remotely requested commit to one of the stored IUs.

		Returns the IU's revision number, or 0 on failure (unknown IU,
		out-of-date request revision, or already-committed IU).'''
		if iu_commission.uid not in self._iu_store:
			LOGGER.warning("Remote InBuffer tried to spuriously write non-existent IU "+str(iu_commission.uid))
			return 0
		iu = self._iu_store[iu_commission.uid]
		with iu.revision_lock:
			if (iu_commission.revision != 0) and (iu_commission.revision != iu.revision):
				# (0 means "do not pay attention to the revision number" -> "force update")
				LOGGER.warning("Remote write operation failed because request was out of date; IU "+str(iu_commission.uid))
				return 0
			if iu.committed:
				return 0
			else:
				iu._internal_commit(writer_name=iu_commission.writer_name)
				self.call_iu_event_handlers(iu_commission.uid, local=True, event_type=ipaaca.iu.IUEventType.COMMITTED, category=iu.category)
				return iu.revision
	def _get_informer(self, iu_category):
		'''Return (or create, store and return) an informer object for IUs of the specified category.'''
		if iu_category in self._informer_store:
			LOGGER.info("Returning informer on scope "+"/ipaaca/channel/"+str(self._channel)+"/category/"+str(iu_category))
			return self._informer_store[iu_category]
		be = ipaaca.backend.get_default_backend()
		informer_iu = be.createInformer(
				be.Scope("/ipaaca/channel/"+str(self._channel)+"/category/"+str(iu_category)),
				config=self._participant_config,
				dataType=object)
		self._informer_store[iu_category] = informer_iu #new_tuple
		LOGGER.info("Returning NEW informer on scope "+"/ipaaca/channel/"+str(self._channel)+"/category/"+str(iu_category))
		return informer_iu #return new_tuple
	def add(self, iu):
		'''Add an IU to the IU store, assign an ID and publish it.

		Raises IUPublishedError if the IU is already in a buffer and
		IURetractedError if it has been retracted before.'''
		if iu.uid in self._iu_store:
			raise ipaaca.exception.IUPublishedError(iu)
		if iu.buffer is not None:
			raise ipaaca.exception.IUPublishedError(iu)
		if iu.retracted:
			raise ipaaca.exception.IURetractedError(iu)
		if iu.access_mode != ipaaca.iu.IUAccessMode.MESSAGE:
			# Messages are not really stored in the OutputBuffer
			self._iu_store[iu.uid] = iu
		iu.buffer = self
		self._publish_iu(iu)
	def remove(self, iu=None, iu_uid=None):
		'''Retracts an IU and removes it from the OutputBuffer.

		Accepts either the IU object itself or its uid. Returns the
		removed IU, or None if neither argument was given. Raises
		IUNotFoundError for an unknown uid.'''
		if iu is None:
			if iu_uid is None:
				return None
			else:
				if iu_uid not in self._iu_store:
					raise ipaaca.exception.IUNotFoundError(iu_uid)
				iu = self._iu_store[iu_uid]
		# unpublish the IU
		self._retract_iu(iu)
		del self._iu_store[iu.uid]
		return iu
	def _publish_iu(self, iu):
		'''Publish an IU.'''
		informer = self._get_informer(iu._category)
		informer.publishData(iu)
	def _retract_iu(self, iu):
		'''Retract an IU (announce its removal on its category scope).'''
		iu._retracted = True
		iu_retraction = ipaaca.ipaaca_pb2.IURetraction()
		iu_retraction.uid = iu.uid
		iu_retraction.revision = iu.revision
		informer = self._get_informer(iu._category)
		informer.publishData(iu_retraction)
	def _retract_all_internal(self):
		'''Retract all IUs without removal (for Buffer teardown).'''
		for iu in self._iu_store.values():
			if not iu._retracted:
				self._retract_iu(iu)
	def _deactivate_all_internal(self):
		'''Deactivate all participants.'''
		try:
			self._server.deactivate()
		except RuntimeError:
			# Is raised if an already deactivated participant is
			# deactivated again
			pass
		for informer in self._informer_store.values():
			try:
				informer.deactivate()
			except RuntimeError:
				# Is raised if an already deactivated participant is
				# deactivated again
				pass
	def _send_iu_commission(self, iu, writer_name):
		'''Send IU commission.

		Keyword arguments:
		iu -- the IU that has been committed to
		writer_name -- name of the Buffer that initiated this commit, necessary
			to enable remote components to filter out updates that originated
			from their own operations
		'''
		# a raw Protobuf object for IUCommission is produced
		# (unlike updates, where we have an intermediate class)
		iu_commission = ipaaca.ipaaca_pb2.IUCommission()
		iu_commission.uid = iu.uid
		iu_commission.revision = iu.revision
		iu_commission.writer_name = iu.owner_name if writer_name is None else writer_name
		informer = self._get_informer(iu._category)
		informer.publishData(iu_commission)
	def _send_iu_link_update(self, iu, is_delta, revision, new_links=None, links_to_remove=None, writer_name="undef"):
		'''Send an IU link update.

		Keyword arguments:
		iu -- the IU being updated
		is_delta -- whether this is an incremental update or a replacement of
			the whole link dictionary
		revision -- the new revision number
		new_links -- a dictionary of new link sets
		links_to_remove -- a dict of the link sets that shall be removed
		writer_name -- name of the Buffer that initiated this update, necessary
			to enable remote components to filter out updates that originated
			from their own operations
		'''
		if new_links is None:
			new_links = {}
		if links_to_remove is None:
			links_to_remove = {}
		link_update = ipaaca.converter.IULinkUpdate(iu._uid, is_delta=is_delta, revision=revision)
		link_update.new_links = new_links
		if is_delta:
			link_update.links_to_remove = links_to_remove
		link_update.writer_name = writer_name
		informer = self._get_informer(iu._category)
		informer.publishData(link_update)
		# FIXME send the notification to the target, if the target is not the writer_name
	def _send_iu_payload_update(self, iu, is_delta, revision, new_items=None, keys_to_remove=None, writer_name="undef"):
		'''Send an IU payload update.

		Keyword arguments:
		iu -- the IU being updated
		is_delta -- whether this is an incremental update or a replacement
		revision -- the new revision number
		new_items -- a dictionary of new payload items
		keys_to_remove -- a list of the keys that shall be removed from the
			payload
		writer_name -- name of the Buffer that initiated this update, necessary
			to enable remote components to filter out updates that originated
			from their own operations
		'''
		if new_items is None:
			new_items = {}
		if keys_to_remove is None:
			keys_to_remove = []
		payload_update = ipaaca.converter.IUPayloadUpdate(
				uid=iu._uid,
				revision=revision,
				is_delta=is_delta,
				payload_type=iu.payload_type)
		payload_update.new_items = new_items
		if is_delta:
			payload_update.keys_to_remove = keys_to_remove
		payload_update.writer_name = writer_name
		informer = self._get_informer(iu._category)
		informer.publishData(payload_update)
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import ipaaca.defaults
import ipaaca.exception
import ipaaca.iu
import ipaaca.misc
import os
import re
try:
import configparser
except:
import ConfigParser as configparser
# Module-level logger for the ipaaca library.
LOGGER = ipaaca.misc.get_library_logger()
# Lazily-created singleton; see get_global_config().
__global_config = None
class Config(object):
	"""Simple key-value store for ipaaca configuration options.

	Options are collected from configuration files ('ipaaca.conf' in the
	current directory or ~/.config/ipaaca.conf) and from IPAACA_*
	environment variables.
	"""
	def __init__(self):
		self._store = {}
	def get_with_default(self, key, default_value, warn=False):
		"""Return the configured value for key, or default_value if unset.

		Keyword arguments:
		warn -- log the fallback at warning (True) or debug (False) level
		"""
		if key in self._store:
			return self._store[key]
		else:
			notif = LOGGER.warning if warn else LOGGER.debug
			notif('Config key '+str(key)+' not found, returning default of '+str(default_value))
			return default_value
	def populate_from_global_sources(self):
		"""Reset the store and (re-)populate it from files and environment."""
		self._store = {}
		self.populate_from_any_conf_files()
		self.populate_from_environment()
		#self.populate_from_argv_overrides() # TODO IMPLEMENT_ME
	def populate_from_any_conf_files(self):
		"""Load the first readable configuration file.

		Tries 'ipaaca.conf' in the current directory, then
		~/.config/ipaaca.conf; all keys of the file's [ipaaca] section
		are copied into the store.
		"""
		globalconf = os.getenv('HOME', '')+'/.config/ipaaca.conf'
		for filename in ['ipaaca.conf', globalconf]:
			try:
				c = configparser.ConfigParser()
				# 'with' ensures the file is closed even if parsing fails
				# (the previous version leaked the handle on exceptions).
				with open(filename, 'r') as f:
					# ConfigParser.readfp() was removed in Python 3.12;
					# prefer read_file() when available.
					reader = getattr(c, 'read_file', None) or c.readfp
					reader(f)
				LOGGER.info('Including configuration from '+filename)
				for k, v in c.items('ipaaca'):
					self._store[k] = v
				return
			except (IOError, OSError, configparser.Error):
				# Narrowed from a bare 'except:' so that unrelated errors
				# (e.g. KeyboardInterrupt) are no longer swallowed.
				continue
		LOGGER.info('Could not load ipaaca.conf either here or in ~/.config')
	def populate_from_environment(self):
		"""Copy IPAACA_* environment variables into the store.

		IPAACA_SOME_KEY=value becomes the entry 'some.key' -> 'value'.
		Malformed keys and overlong values (>1023 chars) are skipped.
		"""
		for k, v in os.environ.items():
			if k.startswith('IPAACA_'):
				if re.match(r'^[A-Za-z0-9_]*$', k) is None:
					LOGGER.warning('Ignoring malformed environment key')
				elif len(v) > 1023:
					LOGGER.warning('Ignoring long environment value')
				else:
					# remove initial IPAACA_ and transform key to dotted lowercase
					trans_key = k[7:].lower().replace('_', '.')
					self._store[trans_key] = v
					LOGGER.debug('Configured from environment: '+str(trans_key)+'="'+str(v)+'"')
def get_global_config():
	"""Return the process-wide Config singleton, creating and
	populating it from files and environment on first use."""
	global __global_config
	if __global_config is None:
		cfg = Config()
		cfg.populate_from_global_sources()
		__global_config = cfg
	return __global_config
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import collections
#import rsb.converter
import ipaaca.ipaaca_pb2
import ipaaca.defaults
import ipaaca.exception
import ipaaca.iu
import ipaaca.misc
# Module-level logger for the converter module.
LOGGER = ipaaca.misc.get_library_logger()
# Prefer simplejson (C-accelerated) when installed; fall back to stdlib json.
try:
	import simplejson as json
except ImportError:
	import json
	LOGGER.warn('INFO: Using module "json" instead of "simplejson". Install "simplejson" for better performance.')
# Public API of this converter module.
__all__ = [
	'IntConverter',
	'IUConverter',
	'IULinkUpdate',
	'IULinkUpdateConverter',
	'IUPayloadUpdate',
	'IUPayloadUpdateConverter',
	'MessageConverter',
	'register_global_converter',
]
# Lazily built on first call of LOW_LEVEL_WIRE_SCHEMA_FOR().
_LOW_LEVEL_WIRE_SCHEMA_MAP = None
def LOW_LEVEL_WIRE_SCHEMA_FOR(abstractname):
	'''Map the abstract wire schema name (was used in RSB) to a
	transport-dependent magic to detect on the wire.
	Here: a required protobuf field

	Accepts either a Python type or its string wire-schema alias;
	returns None for unknown names.'''
	global _LOW_LEVEL_WIRE_SCHEMA_MAP
	if _LOW_LEVEL_WIRE_SCHEMA_MAP is None:
		# Built lazily so that all referenced classes exist by first use.
		_LOW_LEVEL_WIRE_SCHEMA_MAP = {
			int: ipaaca.ipaaca_pb2.WireTypeIntMessage,
			ipaaca.iu.IU: ipaaca.ipaaca_pb2.WireTypeIU,
			ipaaca.iu.Message: ipaaca.ipaaca_pb2.WireTypeMessageIU,
			IUPayloadUpdate: ipaaca.ipaaca_pb2.WireTypeIUPayloadUpdate,
			IULinkUpdate: ipaaca.ipaaca_pb2.WireTypeIULinkUpdate,
			'int': ipaaca.ipaaca_pb2.WireTypeIntMessage,
			'ipaaca-iu': ipaaca.ipaaca_pb2.WireTypeIU,
			'ipaaca-messageiu': ipaaca.ipaaca_pb2.WireTypeMessageIU,
			'ipaaca-iu-payload-update': ipaaca.ipaaca_pb2.WireTypeIUPayloadUpdate,
			'ipaaca-iu-link-update': ipaaca.ipaaca_pb2.WireTypeIULinkUpdate,
		}
	return _LOW_LEVEL_WIRE_SCHEMA_MAP.get(abstractname)
def __fail_no_type_converter():
	# NOTE(review): helper appears unused within this module -- confirm.
	"""Raise a BackendSerializationError (no converter for a type)."""
	raise ipaaca.exception.BackendSerializationError()
class FailingDict(dict):
	"""Dict that raises a configurable exception class (instead of the
	plain KeyError) when a missing key is looked up via [] access."""
	def __init__(self, error_class, *args, **kwargs):
		super(FailingDict, self).__init__(*args, **kwargs)
		self._error_class = error_class
	def __missing__(self, k):
		# dict.__getitem__ calls __missing__ only for absent keys, so
		# present keys take the normal C-level fast path.
		raise self._error_class(k)
# global converter / [un]marshaller store
# Lookups of unregistered types/wire schemas raise the given error class.
__converter_registry_by_type = FailingDict(ipaaca.exception.BackendSerializationError)
__converter_registry_by_wire_schema = FailingDict(ipaaca.exception.BackendDeserializationError)
def register_global_converter(converter):
	"""Register a converter in both global registries (by Python data
	type and by low-level wire schema).

	Raises NotImplementedError for an unknown wire schema and
	ConverterRegistrationError on duplicate registration.
	"""
	global __converter_registry_by_type, __converter_registry_by_wire_schema
	real_wire_schema = LOW_LEVEL_WIRE_SCHEMA_FOR(converter._wire_schema)
	if real_wire_schema is None:
		raise NotImplementedError('There is no entry in the _LOW_LEVEL_WIRE_SCHEMA_MAP for '+str(converter._wire_schema))
	if real_wire_schema in __converter_registry_by_wire_schema:
		raise ipaaca.exception.ConverterRegistrationError(real_wire_schema)
	if converter._data_type in __converter_registry_by_type:
		raise ipaaca.exception.ConverterRegistrationError(converter._data_type.__name__)
	__converter_registry_by_type[converter._data_type] = converter
	__converter_registry_by_wire_schema[real_wire_schema] = converter
def deserialize(lowlevel_message):
	"""Turn a raw wire message (bytes of a TransportLevelWrapper) into the
	corresponding Python object.

	Registered converters handle IU/Message/update types; the remaining
	protocol messages are parsed directly into their protobuf classes.
	Raises BackendDeserializationError for unknown wire types.
	"""
	pbo_outer = ipaaca.ipaaca_pb2.TransportLevelWrapper()
	pbo_outer.ParseFromString(lowlevel_message)
	type_ = pbo_outer.transport_message_type
	#print('Received wire message type', type_)
	if type_ in __converter_registry_by_wire_schema:
		return __converter_registry_by_wire_schema[type_].deserialize(pbo_outer.raw_message, None)
	else:
		pbo = None
		if type_ == ipaaca.ipaaca_pb2.WireTypeRemoteRequestResult:
			pbo = ipaaca.ipaaca_pb2.RemoteRequestResult()
		elif type_ == ipaaca.ipaaca_pb2.WireTypeIURetraction:
			pbo = ipaaca.ipaaca_pb2.IURetraction()
		elif type_ == ipaaca.ipaaca_pb2.WireTypeIUCommission:
			pbo = ipaaca.ipaaca_pb2.IUCommission()
		elif type_ == ipaaca.ipaaca_pb2.WireTypeIUResendRequest:
			pbo = ipaaca.ipaaca_pb2.IUResendRequest()
		elif type_ == ipaaca.ipaaca_pb2.WireTypeIUPayloadUpdateRequest:
			pbo = ipaaca.ipaaca_pb2.IUPayloadUpdateRequest()
		elif type_ == ipaaca.ipaaca_pb2.WireTypeIUCommissionRequest:
			pbo = ipaaca.ipaaca_pb2.IUCommissionRequest()
		elif type_ == ipaaca.ipaaca_pb2.WireTypeIULinkUpdateRequest:
			pbo = ipaaca.ipaaca_pb2.IULinkUpdateRequest()
		if pbo is None:
			raise ipaaca.exception.BackendDeserializationError(type_)
		else:
			pbo.ParseFromString(pbo_outer.raw_message)
			return pbo
	# NOTE(review): unreachable -- both branches above return or raise.
	raise ipaaca.exception.BackendDeserializationError(type_)
def serialize(obj):
	"""Serialize a Python object into the raw wire format (a bytearray of
	a TransportLevelWrapper protobuf message).

	Registered converters handle IU/Message/update objects; the remaining
	protocol protobuf classes are serialized directly. Raises
	BackendSerializationError for unknown object types.
	"""
	inner, type_ = None, None
	if obj.__class__ in __converter_registry_by_type:
		cls_ = obj.__class__
		inner, wire = __converter_registry_by_type[obj.__class__].serialize(obj)
		type_ = LOW_LEVEL_WIRE_SCHEMA_FOR(wire)
	else:
		cls_ = obj.__class__
		if cls_ == ipaaca.ipaaca_pb2.RemoteRequestResult:
			type_ = ipaaca.ipaaca_pb2.WireTypeRemoteRequestResult
		elif cls_ == ipaaca.ipaaca_pb2.IURetraction:
			type_ = ipaaca.ipaaca_pb2.WireTypeIURetraction
		elif cls_ == ipaaca.ipaaca_pb2.IUCommission:
			type_ = ipaaca.ipaaca_pb2.WireTypeIUCommission
		elif cls_ == ipaaca.ipaaca_pb2.IUResendRequest:
			type_ = ipaaca.ipaaca_pb2.WireTypeIUResendRequest
		elif cls_ == ipaaca.ipaaca_pb2.IUPayloadUpdateRequest:
			type_ = ipaaca.ipaaca_pb2.WireTypeIUPayloadUpdateRequest
		elif cls_ == ipaaca.ipaaca_pb2.IUCommissionRequest:
			type_ = ipaaca.ipaaca_pb2.WireTypeIUCommissionRequest
		elif cls_ == ipaaca.ipaaca_pb2.IULinkUpdateRequest:
			type_ = ipaaca.ipaaca_pb2.WireTypeIULinkUpdateRequest
		if type_ is None:
			raise ipaaca.exception.BackendSerializationError(cls_)
		else:
			inner = obj.SerializeToString()
	pbo = ipaaca.ipaaca_pb2.TransportLevelWrapper()
	pbo.transport_message_type = type_
	pbo.raw_message = inner
	return bytearray(pbo.SerializeToString())
class ConverterBase(object):
	'''Base for converters (to serialize and unserialize
	data automatically depending on its Python type).

	Subclasses must implement serialize() and deserialize().
	'''
	def __init__(self, substrate, data_type, wire_schema):
		self._substrate = substrate
		self._data_type = data_type
		self._wire_schema = wire_schema
		self.wireSchema = wire_schema  # added compat with RSB
	def serialize(self, value):
		raise NotImplementedError(
				'NOT IMPLEMENTED for ' + self.__class__.__name__ + ': serialize')
	def deserialize(self, stream, _UNUSED_override_wire_schema):
		raise NotImplementedError(
				'NOT IMPLEMENTED for ' + self.__class__.__name__ + ': deserialize')
class IntConverter(ConverterBase):
	"""Convert Python int objects to Protobuf ints and vice versa."""
	def __init__(self, wireSchema="int", dataType=None):
		super(IntConverter, self).__init__(bytearray, int, wireSchema)
	def serialize(self, value):
		"""Pack an int into an IntMessage; returns (bytes, wire schema)."""
		pbo = ipaaca.ipaaca_pb2.IntMessage()
		pbo.value = value
		return pbo.SerializeToString(), self.wireSchema
	def deserialize(self, byte_stream, ws):
		"""Unpack an IntMessage byte stream back into a Python int."""
		pbo = ipaaca.ipaaca_pb2.IntMessage()
		pbo.ParseFromString(byte_stream)
		return pbo.value
def pack_payload_entry(entry, key, value, _type=None):
	"""Fill a protobuf payload entry in place.

	Keyword arguments:
	entry -- protobuf payload item to populate (modified in place)
	key -- payload key
	value -- payload value (JSON-encoded unless _type says otherwise)
	_type -- payload type; None is treated as JSON

	Raises IpaacaException for unsupported payload types.
	"""
	entry.key = key
	if _type is None or _type == ipaaca.iu.IUPayloadType.JSON:
		entry.value = json.dumps(value)
		# Bug fix: with _type=None the old code fell through to
		# 'entry.type = _type', assigning None to a protobuf string
		# field, which raises TypeError. Record the effective type.
		entry.type = ipaaca.iu.IUPayloadType.JSON
	elif _type == ipaaca.iu.IUPayloadType.STR or _type == 'MAP':
		entry.value = str(value)
		entry.type = _type
	else:
		raise ipaaca.exception.IpaacaException('Asked to send payload entry with unsupported type "' + _type + '".')
def unpack_payload_entry(entry):
	"""Decode a protobuf payload entry back into a Python value.

	JSON-typed entries are json-decoded; STR/'str' entries are returned
	verbatim; unknown types are returned verbatim with a warning.
	"""
	# We assume that the only transfer types are 'STR' or 'JSON'. Both are transparently handled by json.loads
	if entry.type == ipaaca.iu.IUPayloadType.JSON:
		return json.loads(entry.value)
	elif entry.type == ipaaca.iu.IUPayloadType.STR or entry.type == 'str':
		return entry.value
	else:
		LOGGER.warn('Received payload entry with unsupported type "' + entry.type + '".')
		return entry.value
class IUConverter(ConverterBase):
	'''
	Converter class for Full IU representations
	wire:bytearray <-> wire-schema:ipaaca-full-iu <-> class ipaacaRSB.IU

	Serializes local IU objects to protobuf and deserializes received
	byte streams into RemotePushIU instances (the remote-side proxy).
	'''
	def __init__(self, wireSchema="ipaaca-iu", dataType=None): #ipaaca.iu.IU):
		super(IUConverter, self).__init__(bytearray, ipaaca.iu.IU if dataType is None else dataType, wireSchema)
		# Subclasses (e.g. MessageConverter) override these two fields.
		self._access_mode = ipaaca.ipaaca_pb2.IU.PUSH
		self._remote_data_type = ipaaca.iu.RemotePushIU
	def serialize(self, iu):
		"""Pack an IU (metadata, payload, links) into a protobuf IU;
		returns (bytes, wire schema)."""
		pbo = ipaaca.ipaaca_pb2.IU()
		pbo.access_mode = self._access_mode
		pbo.uid = iu._uid
		pbo.revision = iu._revision
		pbo.category = iu._category
		pbo.payload_type = iu._payload_type
		pbo.owner_name = iu._owner_name
		pbo.committed = iu._committed
		pbo.read_only = iu._read_only
		for k, v in iu._payload.items():
			entry = pbo.payload.add()
			pack_payload_entry(entry, k, v, iu.payload_type)
		for type_ in iu._links.keys():
			linkset = pbo.links.add()
			linkset.type = type_
			linkset.targets.extend(iu._links[type_])
		return pbo.SerializeToString(), self.wireSchema
	def deserialize(self, byte_stream, ws):
		"""Unpack a protobuf IU byte stream into a remote IU proxy object."""
		pbo = ipaaca.ipaaca_pb2.IU()
		pbo.ParseFromString(byte_stream)
		_payload = {}
		for entry in pbo.payload:
			_payload[entry.key] = unpack_payload_entry(entry)
		_links = collections.defaultdict(set)
		for linkset in pbo.links:
			for target_uid in linkset.targets:
				_links[linkset.type].add(target_uid)
		return self._remote_data_type(
			uid=pbo.uid,
			revision=pbo.revision,
			read_only = pbo.read_only,
			owner_name = pbo.owner_name,
			category = pbo.category,
			# legacy payload type 'MAP' is mapped to 'str'
			payload_type = 'str' if pbo.payload_type == 'MAP' else pbo.payload_type,
			committed = pbo.committed,
			payload=_payload,
			links=_links)
class MessageConverter(IUConverter):
	'''
	Converter for Message IUs (ephemeral, read-only IUs).
	wire:bytearray <-> wire-schema:ipaaca-messageiu <-> class ipaaca.iu.Message

	Reuses the IUConverter wire format but marks the access mode as
	MESSAGE and deserializes into RemoteMessage proxies.
	'''
	def __init__(self, wireSchema="ipaaca-messageiu", dataType=None): #ipaaca.iu.Message):
		super(MessageConverter, self).__init__(wireSchema, ipaaca.iu.Message)
		self._access_mode = ipaaca.ipaaca_pb2.IU.MESSAGE
		self._remote_data_type = ipaaca.iu.RemoteMessage
class IULinkUpdate(object):
	"""Value object describing a change to an IU's links.

	Carries either a delta (links to add / remove) or a full replacement
	set, together with bookkeeping data (revision, writer, request info).
	"""
	def __init__(self, uid, revision, is_delta, writer_name="undef", new_links=None, links_to_remove=None, request_uid=None, request_endpoint=None):
		super(IULinkUpdate, self).__init__()
		self.uid = uid
		self.revision = revision
		self.writer_name = writer_name
		self.is_delta = is_delta
		# defaultdict(set) so that absent link types yield an empty set
		if new_links is None:
			self.new_links = collections.defaultdict(set)
		else:
			self.new_links = collections.defaultdict(set, new_links)
		if links_to_remove is None:
			self.links_to_remove = collections.defaultdict(set)
		else:
			self.links_to_remove = collections.defaultdict(set, links_to_remove)
		self.request_uid = request_uid
		self.request_endpoint = request_endpoint
	def __str__(self):
		fragments = [
			'LinkUpdate(' + 'uid=' + self.uid + ', ',
			'revision=' + str(self.revision) + ', ',
			'writer_name=' + str(self.writer_name) + ', ',
			'is_delta=' + str(self.is_delta) + ', ',
			'new_links = ' + str(self.new_links) + ', ',
			'links_to_remove = ' + str(self.links_to_remove) + ')',
			]
		return ''.join(fragments)
class IULinkUpdateConverter(ConverterBase):
	"""Converter between IULinkUpdate objects and their protobuf wire form."""
	def __init__(self, wireSchema="ipaaca-iu-link-update", dataType=None): #=IULinkUpdate):
		super(IULinkUpdateConverter, self).__init__(bytearray, IULinkUpdate, wireSchema)
	def serialize(self, iu_link_update):
		"""Serialize an IULinkUpdate; returns (serialized_bytes, wire_schema)."""
		pbo = ipaaca.ipaaca_pb2.IULinkUpdate()
		pbo.uid = iu_link_update.uid
		pbo.writer_name = iu_link_update.writer_name
		pbo.revision = iu_link_update.revision
		# request_uid/request_endpoint are optional; only set when present
		if iu_link_update.request_uid:
			pbo.request_uid = iu_link_update.request_uid
		if iu_link_update.request_endpoint:
			pbo.request_endpoint = iu_link_update.request_endpoint
		for type_ in iu_link_update.new_links.keys():
			linkset = pbo.new_links.add()
			linkset.type = type_
			linkset.targets.extend(iu_link_update.new_links[type_])
		for type_ in iu_link_update.links_to_remove.keys():
			linkset = pbo.links_to_remove.add()
			linkset.type = type_
			linkset.targets.extend(iu_link_update.links_to_remove[type_])
		pbo.is_delta = iu_link_update.is_delta
		return pbo.SerializeToString(), self.wireSchema
	def deserialize(self, byte_stream, ws):
		"""Deserialize protobuf bytes into an IULinkUpdate instance."""
		pbo = ipaaca.ipaaca_pb2.IULinkUpdate()
		pbo.ParseFromString(byte_stream)
		LOGGER.debug('received an IULinkUpdate for revision '+str(pbo.revision))
		iu_link_up = IULinkUpdate( uid=pbo.uid, revision=pbo.revision, writer_name=pbo.writer_name, is_delta=pbo.is_delta, request_uid=pbo.request_uid, request_endpoint=pbo.request_endpoint)
		for entry in pbo.new_links:
			iu_link_up.new_links[str(entry.type)] = set(entry.targets)
		for entry in pbo.links_to_remove:
			iu_link_up.links_to_remove[str(entry.type)] = set(entry.targets)
		return iu_link_up
class IUPayloadUpdate(object):
	"""Value object describing a change to an IU's payload.

	Carries either a delta (items to set / keys to remove) or a full
	replacement dict, plus bookkeeping data (revision, writer, request info).
	"""
	def __init__(self, uid, revision, is_delta, payload_type, writer_name="undef", new_items=None, keys_to_remove=None, request_uid=None, request_endpoint=None):
		super(IUPayloadUpdate, self).__init__()
		self.uid = uid
		self.revision = revision
		self.payload_type = payload_type
		self.writer_name = writer_name
		self.is_delta = is_delta
		# avoid shared mutable defaults by creating fresh containers
		if new_items is None:
			self.new_items = {}
		else:
			self.new_items = new_items
		if keys_to_remove is None:
			self.keys_to_remove = []
		else:
			self.keys_to_remove = keys_to_remove
		self.request_uid = request_uid
		self.request_endpoint = request_endpoint
	def __str__(self):
		fragments = [
			'PayloadUpdate(' + 'uid=' + self.uid + ', ',
			'revision=' + str(self.revision) + ', ',
			'writer_name=' + str(self.writer_name) + ', ',
			'payload_type=' + str(self.payload_type) + ', ',
			'is_delta=' + str(self.is_delta) + ', ',
			'new_items = ' + str(self.new_items) + ', ',
			'keys_to_remove = ' + str(self.keys_to_remove) + ')',
			]
		return ''.join(fragments)
class IUPayloadUpdateConverter(ConverterBase):
	"""Converter between IUPayloadUpdate objects and their protobuf wire form."""
	def __init__(self, wireSchema="ipaaca-iu-payload-update", dataType=None):
		super(IUPayloadUpdateConverter, self).__init__(bytearray, IUPayloadUpdate, wireSchema)
	def serialize(self, iu_payload_update):
		"""Serialize an IUPayloadUpdate; returns (serialized_bytes, wire_schema)."""
		pbo = ipaaca.ipaaca_pb2.IUPayloadUpdate()
		pbo.uid = iu_payload_update.uid
		pbo.writer_name = iu_payload_update.writer_name
		pbo.revision = iu_payload_update.revision
		# request_uid/request_endpoint are optional; only set when present
		if iu_payload_update.request_uid:
			pbo.request_uid = iu_payload_update.request_uid
		if iu_payload_update.request_endpoint:
			pbo.request_endpoint = iu_payload_update.request_endpoint
		for k, v in iu_payload_update.new_items.items():
			entry = pbo.new_items.add()
			pack_payload_entry(entry, k, v, iu_payload_update.payload_type)
		pbo.keys_to_remove.extend(iu_payload_update.keys_to_remove)
		pbo.is_delta = iu_payload_update.is_delta
		return pbo.SerializeToString(), self.wireSchema
	def deserialize(self, byte_stream, ws):
		"""Deserialize protobuf bytes into an IUPayloadUpdate instance.

		The payload type is not transmitted as a field on the wire (serialize
		only uses it to pack entries), hence payload_type=None on the result."""
		pbo = ipaaca.ipaaca_pb2.IUPayloadUpdate()
		pbo.ParseFromString(byte_stream)
		LOGGER.debug('received an IUPayloadUpdate for revision '+str(pbo.revision))
		iu_up = IUPayloadUpdate( uid=pbo.uid, revision=pbo.revision, payload_type=None, writer_name=pbo.writer_name, is_delta=pbo.is_delta, request_uid=pbo.request_uid, request_endpoint=pbo.request_endpoint)
		for entry in pbo.new_items:
			iu_up.new_items[entry.key] = unpack_payload_entry(entry)
		iu_up.keys_to_remove = pbo.keys_to_remove[:]
		return iu_up
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
# Library-wide default settings.
IPAACA_DEFAULT_CHANNEL = 'default'  # channel name used when none is specified
IPAACA_LOGGER_NAME = 'ipaaca'  # name of the library logger
IPAACA_DEFAULT_LOGGING_LEVEL = 'WARNING'  # initial level of the library logger
IPAACA_DEFAULT_IU_PAYLOAD_TYPE = 'JSON' # one of ipaaca.iu.IUPayloadType
# RSB transport settings; presumably None means "use the RSB/backend default"
# -- TODO confirm against the code consuming these values.
IPAACA_DEFAULT_RSB_HOST = None
IPAACA_DEFAULT_RSB_PORT = None
IPAACA_DEFAULT_RSB_TRANSPORT = None
IPAACA_DEFAULT_RSB_SOCKET_SERVER = None
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
# Public API of this module for 'from ... import *'.
# NOTE(review): several exception classes defined below are not listed here
# (BackendInitializationError, BackendSerializationError,
# BackendDeserializationError, ConverterRegistrationError, IURetractedError,
# IUResendRequestRemoteServerUnknownError) -- confirm whether their exclusion
# is intentional.
__all__ = [
	'IpaacaError',
	'IUCommittedError',
	'IUNotFoundError',
	'IUPayloadLockedError',
	'IUPayloadLockTimeoutError',
	'IUPublishedError',
	'IUReadOnlyError',
	'IUResendRequestFailedError',
	'IUUpdateFailedError',
]
class IpaacaError(Exception):
	"""Base class of all exceptions raised by the ipaaca library."""
	pass
class BackendInitializationError(IpaacaError):
	"""Error indicating that the selected communication backend
	could not be initialized."""
	def __init__(self, name=''):
		super(BackendInitializationError, self).__init__( \
			'Failed to initialize selected backend '+str(name))
class BackendSerializationError(IpaacaError):
	"""Raised when type marshalling cannot proceed because no matching
	converter is registered for the given type."""
	def __init__(self, typ):
		message = 'Could not serialize type ' + str(typ.__name__) \
				+ ' - no converter registered.'
		super(BackendSerializationError, self).__init__(message)
class BackendDeserializationError(IpaacaError):
	"""Raised when type unmarshalling cannot proceed because no matching
	converter is registered for the given wire schema."""
	def __init__(self, wire_schema):
		message = 'Could not deserialize wire format "' + str(wire_schema) \
				+ '" - no converter registered.'
		super(BackendDeserializationError, self).__init__(message)
class ConverterRegistrationError(IpaacaError):
	"""Raised when registering a converter for a type or wire schema
	that already has one."""
	def __init__(self, type_name_or_schema):
		message = 'Failed to register a converter: we already have one for ' \
				+ str(type_name_or_schema)
		super(ConverterRegistrationError, self).__init__(message)
class IUCommittedError(IpaacaError):
	"""Raised on write attempts to an IU that is immutable because it has been committed to."""
	def __init__(self, iu):
		message = 'Writing to IU ' + str(iu.uid) + ' failed -- it has been committed to.'
		super(IUCommittedError, self).__init__(message)
class IUNotFoundError(IpaacaError):
	"""Raised when an IU UID is unexpectedly absent from an internal store."""
	def __init__(self, iu_uid):
		message = 'Lookup of IU ' + str(iu_uid) + ' failed.'
		super(IUNotFoundError, self).__init__(message)
class IUPayloadLockTimeoutError(IpaacaError):
	"""Raised when exclusive access to the payload could not be obtained in time."""
	def __init__(self, iu):
		message = 'Timeout while accessing payload of IU ' + str(iu.uid) + '.'
		super(IUPayloadLockTimeoutError, self).__init__(message)
class IUPayloadLockedError(IpaacaError):
	"""Raised when exclusive payload access failed because another party holds the lock."""
	def __init__(self, iu):
		message = 'IU '+str(iu.uid)+' was locked during access attempt.'
		super(IUPayloadLockedError, self).__init__(message)
class IUPublishedError(IpaacaError):
	"""Error indicating that publishing an IU failed since it is already in the buffer."""
	def __init__(self, iu):
		super(IUPublishedError, self).__init__('IU ' + str(iu.uid) + ' is already present in the output buffer.')
class IUReadOnlyError(IpaacaError):
	"""Raised on write attempts to an IU that is marked 'read only'."""
	def __init__(self, iu):
		message = 'Writing to IU ' + str(iu.uid) + ' failed -- it is read-only.'
		super(IUReadOnlyError, self).__init__(message)
class IUResendRequestFailedError(IpaacaError):
	"""Raised when a request to resend a remote IU failed."""
	def __init__(self, iu_uid):
		message = 'Remote resend failed for IU ' + str(iu_uid)
		super(IUResendRequestFailedError, self).__init__(message)
class IUResendRequestRemoteServerUnknownError(IpaacaError):
	"""Raised when a resend request could not be made because the remote server is unknown."""
	def __init__(self, iu_uid):
		message = 'Remote resend request: remote server unknown for IU ' + str(iu_uid)
		super(IUResendRequestRemoteServerUnknownError, self).__init__(message)
class IURetractedError(IpaacaError):
	"""Raised on write attempts to an IU that has been retracted."""
	def __init__(self, iu):
		message = 'Writing to IU ' + str(iu.uid) + ' failed -- it has been retracted.'
		super(IURetractedError, self).__init__(message)
class IUUpdateFailedError(IpaacaError):
	"""Raised when a remote update of an IU failed."""
	def __init__(self, iu):
		message = 'Remote update failed for IU ' + str(iu.uid) + '.'
		super(IUUpdateFailedError, self).__init__(message)
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import collections
import copy
import threading
import uuid
import six
import ipaaca.ipaaca_pb2
import ipaaca.converter
import ipaaca.defaults
import ipaaca.exception
import ipaaca.misc
import ipaaca.payload
# Public API of this module for 'from ... import *'.
# NOTE(review): IUInterface, RemoteMessage and RemotePushIU are defined below
# but not exported here -- confirm whether that is intentional.
__all__ = [
	'IUAccessMode',
	'IUEventType',
	'IUPayloadType',
	'IU',
	'Message',
]
# Library-wide logger instance (configured by ipaaca.misc).
LOGGER = ipaaca.misc.get_library_logger()
# Ways an IU can be accessed/distributed.
IUAccessMode = ipaaca.misc.enum(
	PUSH = 'PUSH',
	REMOTE = 'REMOTE',
	MESSAGE = 'MESSAGE'
	)
# Event types delivered to buffer IU event handlers.
IUEventType = ipaaca.misc.enum(
	ADDED = 'ADDED',
	COMMITTED = 'COMMITTED',
	DELETED = 'DELETED',
	RETRACTED = 'RETRACTED',
	UPDATED = 'UPDATED',
	LINKSUPDATED = 'LINKSUPDATED',
	MESSAGE = 'MESSAGE'
	)
# Supported payload serialization types.
IUPayloadType = ipaaca.misc.enum(
	JSON = 'JSON',
	STR = 'STR'
	)
class IUInterface(object):
	"""Base class of all specialised IU classes.

	Holds the state shared by local and remote IUs (uid, revision, category,
	owner name, committed/retracted flags, access mode, payload type, buffer
	membership and typed links) plus the public link-manipulation API.
	Subclasses must provide ``_modify_links`` (invoked by the link methods
	below) and the payload machinery.
	"""
	def __init__(self, uid, access_mode=IUAccessMode.PUSH, read_only=False, payload_type=None):
		"""Creates an IU.

		Keyword arguments:
		uid -- unique ID of this IU
		access_mode -- access mode of this IU
		read_only -- flag indicating whether this IU is read_only or not
		payload_type -- payload type name; defaults to
			ipaaca.defaults.IPAACA_DEFAULT_IU_PAYLOAD_TYPE when None
		"""
		self._uid = uid
		self._revision = None
		self._category = None
		self._owner_name = None
		self._committed = False
		self._retracted = False
		self._access_mode = access_mode
		self._read_only = read_only
		self._payload_type = payload_type if payload_type is not None else ipaaca.defaults.IPAACA_DEFAULT_IU_PAYLOAD_TYPE
		self._buffer = None
		# payload is not present here; subclasses create self._payload
		self._links = collections.defaultdict(set)
	def __str__(self):
		"""Human-readable dump of category, uid, buffer, owner, payload and links."""
		s = str(self.__class__)+"{ "
		s += "category="+("<None>" if self._category is None else self._category)+" "
		s += "uid="+self._uid+" "
		s += "(buffer="+(self.buffer.unique_name if self.buffer is not None else "<None>")+") "
		s += "owner_name=" + ("<None>" if self.owner_name is None else self.owner_name) + " "
		s += "payload={ "
		for k,v in self.payload.items():
			s += k+":'"+str(v)+"', "
		s += "} "
		s += "links={ "
		for t, ids in self.get_all_links().items():
			s += t+":'"+str(ids)+"', "
		s += "} "
		s += "}"
		return s
	def _add_and_remove_links(self, add, remove):
		'''Just add and remove the new links in our links set, do not send an update here'''
		'''Note: Also used for remotely enforced links updates.'''
		for type in remove.keys(): self._links[type] -= set(remove[type])
		for type in add.keys(): self._links[type] |= set(add[type])
	def _replace_links(self, links):
		'''Just wipe and replace our links set, do not send an update here'''
		'''Note: Also used for remotely enforced links updates.'''
		self._links = collections.defaultdict(set)
		for type in links.keys(): self._links[type] |= set(links[type])
	def add_links(self, type, targets, writer_name=None):
		'''Attempt to add links if the conditions are met
		and send an update message. Then call the local setter.'''
		# Wrap a single string target into a list. (The previous check
		# additionally required hasattr(targets, '__iter__'), which is always
		# true for str on Python 3 -- a no-op -- and never true on Python 2,
		# where a bare string would then be iterated character-wise.)
		if isinstance(targets, six.string_types): targets=[targets]
		self._modify_links(is_delta=True, new_links={type:targets}, links_to_remove={}, writer_name=writer_name)
		self._add_and_remove_links( add={type:targets}, remove={} )
	def remove_links(self, type, targets, writer_name=None):
		'''Attempt to remove links if the conditions are met
		and send an update message. Then call the local setter.'''
		# See add_links for the rationale of this single-string wrap.
		if isinstance(targets, six.string_types): targets=[targets]
		self._modify_links(is_delta=True, new_links={}, links_to_remove={type:targets}, writer_name=writer_name)
		self._add_and_remove_links( add={}, remove={type:targets} )
	def modify_links(self, add, remove, writer_name=None):
		'''Attempt to modify links if the conditions are met
		and send an update message. Then call the local setter.'''
		self._modify_links(is_delta=True, new_links=add, links_to_remove=remove, writer_name=writer_name)
		self._add_and_remove_links( add=add, remove=remove )
	def set_links(self, links, writer_name=None):
		'''Attempt to set (replace) links if the conditions are met
		and send an update message. Then call the local setter.'''
		self._modify_links(is_delta=False, new_links=links, links_to_remove={}, writer_name=writer_name)
		self._replace_links( links=links )
	def get_links(self, type):
		'''Return a copy of the link targets of the given link type.'''
		return set(self._links[type])
	def get_all_links(self):
		'''Return a deep copy of the whole link structure.'''
		return copy.deepcopy(self._links)
	def _get_revision(self):
		return self._revision
	revision = property(fget=_get_revision, doc='Revision number of the IU.')
	def _get_category(self):
		return self._category
	category = property(fget=_get_category, doc='Category of the IU.')
	def _get_payload_type(self):
		return self._payload_type
	def _set_payload_type(self, type):
		# The payload type may only change while the IU is not yet in a buffer.
		if self._buffer is None:
			self._payload_type = type
		else:
			raise ipaaca.exception.IpaacaError('The IU is already in a buffer, cannot change payload type anymore.')
	payload_type = property(
			fget=_get_payload_type,
			fset=_set_payload_type,
			doc='Type of the IU payload')
	def _get_committed(self):
		return self._committed
	committed = property(
			fget=_get_committed,
			doc='Flag indicating whether this IU has been committed to.')
	def _get_retracted(self):
		return self._retracted
	retracted = property(
			fget=_get_retracted,
			doc='Flag indicating whether this IU has been retracted.')
	def _get_uid(self):
		return self._uid
	uid = property(fget=_get_uid, doc='Unique ID of the IU.')
	def _get_access_mode(self):
		return self._access_mode
	access_mode = property(fget=_get_access_mode, doc='Access mode of the IU.')
	def _get_read_only(self):
		return self._read_only
	read_only = property(
			fget=_get_read_only,
			doc='Flag indicating whether this IU is read only.')
	def _get_buffer(self):
		return self._buffer
	def _set_buffer(self, buffer):
		# An IU may only ever belong to a single buffer.
		if self._buffer is not None:
			raise ipaaca.exception.IpaacaError('The IU is already in a buffer, cannot move it.')
		self._buffer = buffer
	buffer = property(
			fget=_get_buffer,
			fset=_set_buffer,
			doc='Buffer this IU is held in.')
	def _get_owner_name(self):
		return self._owner_name
	def _set_owner_name(self, owner_name):
		# The owner name is write-once.
		if self._owner_name is not None:
			raise ipaaca.exception.IpaacaError('The IU already has an owner name, cannot change it.')
		self._owner_name = owner_name
	owner_name = property(
			fget=_get_owner_name,
			fset=_set_owner_name,
			doc="The IU's owner's name.")
class IU(IUInterface):
	"""A local IU, i.e. one that is owned (and possibly published) by this process."""
	def __init__(self, category='undef', access_mode=IUAccessMode.PUSH, read_only=False, payload_type=None):
		super(IU, self).__init__(uid=None, access_mode=access_mode, read_only=read_only, payload_type=payload_type)
		self._revision = 1
		self.uid = str(uuid.uuid4())
		self._category = str(category)
		self.revision_lock = threading.RLock()
		self._payload = ipaaca.payload.Payload(iu=self)
	def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
		"""Bump the revision and, if published, send a link update to remote holders.

		Raises IURetractedError/IUCommittedError when the IU is no longer writable."""
		if self._retracted:
			raise ipaaca.exception.IURetractedError(self)
		if self._committed:
			raise ipaaca.exception.IUCommittedError(self)
		with self.revision_lock:
			# modify links locally
			# NOTE(review): the actual local link change is applied by the
			# callers in IUInterface; only the revision is increased here.
			self._increase_revision_number()
			if self.is_published:
				# send update to remote holders
				self.buffer._send_iu_link_update(
					self,
					revision=self.revision,
					is_delta=is_delta,
					new_links=new_links,
					links_to_remove=links_to_remove,
					writer_name=self.owner_name if writer_name is None else writer_name)
	def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
		"""Modify the payload: add or remove items from this payload locally and send update."""
		if self._retracted:
			raise ipaaca.exception.IURetractedError(self)
		if self._committed:
			raise ipaaca.exception.IUCommittedError(self)
		with self.revision_lock:
			# set item locally
			# FIXME: Is it actually set locally?
			self._increase_revision_number()
			if self.is_published:
				#print(' _modify_payload: running send_iu_pl_upd with writer name '+str(writer_name))
				# send update to remote holders
				self.buffer._send_iu_payload_update(
					self,
					revision=self.revision,
					is_delta=is_delta,
					new_items=new_items,
					keys_to_remove=keys_to_remove,
					writer_name=self.owner_name if writer_name is None else writer_name)
	def _increase_revision_number(self):
		# Callers must hold self.revision_lock.
		self._revision += 1
	def _internal_commit(self, writer_name=None):
		# Committing twice is a silent no-op; committing a retracted IU is an error.
		if self._committed:
			return
		if self._retracted:
			raise ipaaca.exception.IURetractedError(self)
		with self.revision_lock:
			self._increase_revision_number()
			self._committed = True
			if self.buffer is not None:
				self.buffer._send_iu_commission(self, writer_name=writer_name)
	def commit(self):
		"""Commit to this IU."""
		return self._internal_commit()
	def retract(self):
		"""Retract the IU (when in a buffer, removal handles remote notification)."""
		if self._buffer:
			self._buffer.remove(self)
			self._buffer = None
		else:
			self._retracted = True
	def _get_payload(self):
		return self._payload
	def _set_payload(self, new_pl, writer_name=None):
		# Replacing the whole payload: same writability checks as
		# _modify_payload, then a fresh Payload object is installed.
		if self._retracted:
			raise ipaaca.exception.IURetractedError(self)
		if self._committed:
			raise ipaaca.exception.IUCommittedError(self)
		with self.revision_lock:
			self._increase_revision_number()
			self._payload = ipaaca.payload.Payload(
				iu=self,
				writer_name=None if self.buffer is None else (self.buffer.unique_name if writer_name is None else writer_name),
				new_payload=new_pl)
	payload = property(
			fget=_get_payload,
			fset=_set_payload,
			doc='Payload dictionary of this IU.')
	def _get_is_published(self):
		return self.buffer is not None
	is_published = property(
			fget=_get_is_published,
			doc='Flag indicating whether this IU has been published or not.')
	def _set_buffer(self, buffer):
		# Placing the IU into a buffer also fixes the owner name (write-once).
		if self._buffer is not None:
			raise ipaaca.exception.IpaacaError('The IU is already in a buffer, cannot move it.')
		self._buffer = buffer
		self.owner_name = buffer.unique_name
		self._payload.owner_name = buffer.unique_name
	buffer = property(
			fget=IUInterface._get_buffer,
			fset=_set_buffer,
			doc='Buffer this IU is held in.')
	def _set_uid(self, uid):
		# The uid is write-once (assigned in __init__).
		if self._uid is not None:
			raise AttributeError('The uid of IU ' + self.uid + ' has already been set, cannot change it.')
		self._uid = uid
	uid = property(
			fget=IUInterface._get_uid,
			fset=_set_uid,
			doc='Unique ID of the IU.')
class Message(IU):
	"""Local IU of Message sub-type. Can be handled like a normal IU, but on the remote side it is only existent during the handler calls."""
	def __init__(self, category='undef', access_mode=IUAccessMode.MESSAGE, read_only=True, payload_type=None):
		super(Message, self).__init__(category=str(category), access_mode=access_mode, read_only=read_only, payload_type=payload_type)
	def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
		# Messages are fire-and-forget: changes after sending stay local.
		if self.is_published:
			LOGGER.info('Info: modifying a Message after sending has no global effects')
	def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
		# Messages are fire-and-forget: changes after sending stay local.
		if self.is_published:
			LOGGER.info('Info: modifying a Message after sending has no global effects')
	def _increase_revision_number(self):
		self._revision += 1
	def _internal_commit(self, writer_name=None):
		# Committing a sent Message only logs a notice.
		if self.is_published:
			LOGGER.info('Info: committing to a Message after sending has no global effects')
	def commit(self):
		"""Commit to this Message (no global effect once it has been sent)."""
		return self._internal_commit()
	def _get_payload(self):
		return self._payload
	def _set_payload(self, new_pl, writer_name=None):
		# Before sending, behaves like IU._set_payload; after sending, local-only.
		if self.is_published:
			LOGGER.info('Info: modifying a Message after sending has no global effects')
		else:
			if self._retracted:
				raise ipaaca.exception.IURetractedError(self)
			if self._committed:
				raise ipaaca.exception.IUCommittedError(self)
			with self.revision_lock:
				self._increase_revision_number()
				self._payload = ipaaca.payload.Payload(
					iu=self,
					writer_name=None if self.buffer is None else (self.buffer.unique_name if writer_name is None else writer_name),
					new_payload=new_pl)
	payload = property(
			fget=_get_payload,
			fset=_set_payload,
			doc='Payload dictionary of this IU.')
	def _get_is_published(self):
		return self.buffer is not None
	is_published = property(
			fget=_get_is_published,
			doc='Flag indicating whether this IU has been published or not.')
	def _set_buffer(self, buffer):
		# Placing the Message into a buffer also fixes the owner name (write-once).
		if self._buffer is not None:
			raise ipaaca.exception.IpaacaError('The IU is already in a buffer, cannot move it.')
		self._buffer = buffer
		self.owner_name = buffer.unique_name
		self._payload.owner_name = buffer.unique_name
	buffer = property(
			fget=IUInterface._get_buffer,
			fset=_set_buffer,
			doc='Buffer this IU is held in.')
	def _set_uid(self, uid):
		# The uid is write-once (assigned in IU.__init__).
		if self._uid is not None:
			raise AttributeError('The uid of IU ' + self.uid + ' has already been set, cannot change it.')
		self._uid = uid
	uid = property(
			fget=IUInterface._get_uid,
			fset=_set_uid,
			doc='Unique ID of the IU.')
class RemoteMessage(IUInterface):
	"""A remote IU with access mode 'MESSAGE': a transient, purely local copy.

	All mutating operations only log a notice; the _apply_* methods are not
	expected to be invoked for messages and emit a warning if they are."""
	def __init__(self, uid, revision, read_only, owner_name, category, payload_type, committed, payload, links):
		# NOTE(review): access_mode is set to PUSH here, not MESSAGE --
		# confirm whether that is intentional.
		super(RemoteMessage, self).__init__(uid=uid, access_mode=IUAccessMode.PUSH, read_only=read_only, payload_type=payload_type)
		self._revision = revision
		self._category = category
		self.owner_name = owner_name
		self._committed = committed
		# NOTE Since the payload is an already-existant Payload which we didn't modify ourselves,
		# don't try to invoke any modification checks or network updates ourselves either.
		# We are just receiving it here and applying the new data.
		self._payload = ipaaca.payload.Payload(iu=self, new_payload=payload, omit_init_update_message=True)
		self._links = links
	def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
		LOGGER.info('Info: modifying a RemoteMessage only has local effects')
	def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
		LOGGER.info('Info: modifying a RemoteMessage only has local effects')
	def commit(self):
		LOGGER.info('Info: committing to a RemoteMessage only has local effects')
	def _get_payload(self):
		return self._payload
	def _set_payload(self, new_pl):
		# Local replacement only; no network update is sent.
		LOGGER.info('Info: modifying a RemoteMessage only has local effects')
		self._payload = ipaaca.payload.Payload(iu=self, new_payload=new_pl, omit_init_update_message=True)
	payload = property(
			fget=_get_payload,
			fset=_set_payload,
			doc='Payload dictionary of the IU.')
	def _apply_link_update(self, update):
		"""Apply a IULinkUpdate to the IU."""
		LOGGER.warning('Warning: should never be called: RemoteMessage._apply_link_update')
		self._revision = update.revision
		if update.is_delta:
			self._add_and_remove_links(add=update.new_links, remove=update.links_to_remove)
		else:
			self._replace_links(links=update.new_links)
	def _apply_update(self, update):
		"""Apply a IUPayloadUpdate to the IU."""
		LOGGER.warning('Warning: should never be called: RemoteMessage._apply_update')
		self._revision = update.revision
		if update.is_delta:
			for k in update.keys_to_remove: self.payload._remotely_enforced_delitem(k)
			for k, v in update.new_items.items(): self.payload._remotely_enforced_setitem(k, v)
		else:
			# NOTE Please read the comment in the constructor
			self._payload = ipaaca.payload.Payload(iu=self, new_payload=update.new_items, omit_init_update_message=True)
	def _apply_commission(self):
		"""Apply commission to the IU"""
		LOGGER.warning('Warning: should never be called: RemoteMessage._apply_commission')
		self._committed = True
	def _apply_retraction(self):
		"""Apply retraction to the IU"""
		LOGGER.warning('Warning: should never be called: RemoteMessage._apply_retraction')
		self._retracted = True
class RemotePushIU(IUInterface):
"""A remote IU with access mode 'PUSH'."""
def __init__(self, uid, revision, read_only, owner_name, category, payload_type, committed, payload, links):
super(RemotePushIU, self).__init__(uid=uid, access_mode=IUAccessMode.PUSH, read_only=read_only, payload_type=payload_type)
self._revision = revision
self._category = category
self.owner_name = owner_name
self._committed = committed
# NOTE Since the payload is an already-existant Payload which we didn't modify ourselves,
# don't try to invoke any modification checks or network updates ourselves either.
# We are just receiving it here and applying the new data.
self._payload = ipaaca.payload.Payload(iu=self, new_payload=payload, omit_init_update_message=True)
self._links = links
def _modify_links(self, is_delta=False, new_links={}, links_to_remove={}, writer_name=None):
"""Modify the links: add or remove item from this payload remotely and send update."""
if self._retracted:
raise ipaaca.exception.IURetractedError(self)
if self._committed:
raise ipaaca.exception.IUCommittedError(self)
if self._read_only:
raise ipaaca.exception.IUReadOnlyError(self)
requested_update = ipaaca.converter.IULinkUpdate(
uid=self.uid,
revision=self.revision,
is_delta=is_delta,
writer_name=self.buffer.unique_name,
new_links=new_links,
links_to_remove=links_to_remove)
remote_server = self.buffer._get_remote_server(self)
new_revision = remote_server.updateLinks(requested_update)
if new_revision == 0:
raise ipaaca.exception.IUUpdateFailedError(self)
else:
self._revision = new_revision
def _modify_payload(self, is_delta=True, new_items={}, keys_to_remove=[], writer_name=None):
"""Modify the payload: add or remove item from this payload remotely and send update."""
if self._retracted:
raise ipaaca.exception.IURetractedError(self)
if self._committed:
raise ipaaca.exception.IUCommittedError(self)
if self._read_only:
raise ipaaca.exception.IUReadOnlyError(self)
requested_update = ipaaca.converter.IUPayloadUpdate(
uid=self.uid,
revision=self.revision,
payload_type=self.payload_type,
is_delta=is_delta,
writer_name=self.buffer.unique_name,
new_items=new_items,
keys_to_remove=keys_to_remove)
remote_server = self.buffer._get_remote_server(self)
new_revision = remote_server.updatePayload(requested_update)
if new_revision == 0:
raise ipaaca.exception.IUUpdateFailedError(self)
else:
self._revision = new_revision
def commit(self):
"""Commit to this IU."""
if self._committed:
return
if self._retracted:
raise ipaaca.exception.IURetractedError(self)
if self._read_only:
raise ipaaca.exception.IUReadOnlyError(self)
commission_request = ipaaca.ipaaca_pb2.IUCommission()
commission_request.uid = self.uid
commission_request.revision = self.revision
commission_request.writer_name = self.buffer.unique_name
remote_server = self.buffer._get_remote_server(self)
new_revision = remote_server.commit(commission_request)
if new_revision == 0:
raise ipaaca.exception.IUUpdateFailedError(self)
else:
self._revision = new_revision
self._committed = True
def _get_payload(self):
return self._payload
def _set_payload(self, new_pl):
if self._retracted:
raise ipaaca.exception.IURetractedError(self)
if self._committed:
raise ipaaca.exception.IUCommittedError(self)
if self._read_only:
raise ipaaca.exception.IUReadOnlyError(self)
requested_update = ipaaca.converter.IUPayloadUpdate(
uid=self.uid,
revision=self.revision,
payload_type=self.payload_type,
is_delta=False,
writer_name=self.buffer.unique_name,
new_items=new_pl,
keys_to_remove=[])
remote_server = self.buffer._get_remote_server(self)
new_revision = remote_server.updatePayload(requested_update)
if new_revision == 0:
raise ipaaca.exception.IUUpdateFailedError(self)
else:
self._revision = new_revision
# NOTE Please read the comment in the constructor
self._payload = ipaaca.payload.Payload(iu=self, new_payload=new_pl, omit_init_update_message=True)
payload = property(
fget=_get_payload,
fset=_set_payload,
doc='Payload dictionary of the IU.')
def _apply_link_update(self, update):
"""Apply a IULinkUpdate to the IU."""
self._revision = update.revision
if update.is_delta:
self._add_and_remove_links(add=update.new_links, remove=update.links_to_remove)
else:
self._replace_links(links=update.new_links)
def _apply_update(self, update):
"""Apply a IUPayloadUpdate to the IU."""
self._revision = update.revision
if update.is_delta:
for k in update.keys_to_remove: self.payload._remotely_enforced_delitem(k)
for k, v in update.new_items.items(): self.payload._remotely_enforced_setitem(k, v)
else:
# NOTE Please read the comment in the constructor
self._payload = ipaaca.payload.Payload(iu=self, new_payload=update.new_items, omit_init_update_message=True)
	def _apply_commission(self):
		"""Apply commission to the IU (mark it as committed by its owner)."""
		self._committed = True
	def _apply_retraction(self):
		"""Apply retraction to the IU (mark it as retracted by its owner)."""
		self._retracted = True
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import argparse
import logging
import sys
import ipaaca.defaults
__all__ = [
'enum',
'IpaacaArgumentParser',
]
def enum(*sequential, **named):
	"""Create an enum type.

	Positional arguments become members with ascending integer values
	starting at 0; keyword arguments supply explicit values. The generated
	type additionally carries ``_choices`` (list of member names) and
	``_values`` (list of member values).

	Based on suggestion of Alec Thomas on stackoverflow.com:
	http://stackoverflow.com/questions/36932/
	whats-the-best-way-to-implement-an-enum-in-python/1695250#1695250
	"""
	enums = dict(zip(sequential, range(len(sequential))), **named)
	# Snapshot names and values as lists *before* inserting the helper keys.
	# Under Python 3, dict.keys()/values() return live views, so storing
	# them directly made '_choices'/'_values' (and the views themselves)
	# appear in their own contents.
	choices = list(enums.keys())
	values = list(enums.values())
	enums['_choices'] = choices
	enums['_values'] = values # RY e.g. see if raw int is valid
	return type('Enum', (object,), enums)
class IpaacaLoggingHandler(logging.Handler):
	'''A logging handler that prints to stdout.'''

	def __init__(self, prefix='IPAACA', level=logging.NOTSET):
		logging.Handler.__init__(self, level)
		self._prefix = prefix

	def emit(self, record):
		'''Print the record to stdout as "[PREFIX: LEVEL] message".'''
		meta = '[%s: %s] ' % (self._prefix, str(record.levelname))
		try:
			# Logging records use %-style formatting (record.msg % record.args).
			# The previous record.msg.format(record.args) silently returned the
			# raw format string and dropped the arguments.
			msg = record.getMessage()
		except Exception:
			# Fall back the same way RSBLoggingHandler does on bad format args.
			msg = str(record.msg) + ' WITH ARGS: ' + str(record.args)
		print(meta + msg)
class RSBLoggingHandler(logging.Handler):
	'''Stdout logging handler used for the RSB logger (prefix plus message).'''

	def __init__(self, prefix='IPAACA', level=logging.NOTSET):
		logging.Handler.__init__(self, level)
		self._prefix = prefix

	def emit(self, record):
		'''Print the record to stdout as "[PREFIX: LEVEL] message".'''
		header = '[%s: %s] ' % (self._prefix, str(record.levelname))
		try:
			body = str(record.msg % record.args)
		except:
			# Fall back to a raw dump when %-formatting fails.
			body = str(record.msg) + ' WITH ARGS: ' + str(record.args)
		print(header + body)
class GenericNoLoggingHandler(logging.Handler):
	'''A logging handler that discards every record, producing no output.'''
	def emit(self, record):
		# Intentionally swallow the record.
		return
def get_library_logger():
	'''Return ipaaca's library-wide logger object.'''
	logger_name = ipaaca.defaults.IPAACA_LOGGER_NAME
	return logging.getLogger(logger_name)
# Shared handler instances; enable_logging() swaps them on the ipaaca logger.
_IPAACA_LOGGING_HANDLER = IpaacaLoggingHandler('IPAACA')
_GENERIC_NO_LOG_HANDLER = GenericNoLoggingHandler()
# By default, suppress library logging by attaching a no-op handler
# - for IPAACA
get_library_logger().addHandler(_GENERIC_NO_LOG_HANDLER)
# - for RSB
logging.getLogger('rsb').addHandler(_GENERIC_NO_LOG_HANDLER)
def enable_logging(level=None):
	'''Enable ipaaca's library-wide logging, optionally at the given level.

	When level is None, ipaaca.defaults.IPAACA_DEFAULT_LOGGING_LEVEL is used.
	'''
	logger = get_library_logger()
	logger.addHandler(_IPAACA_LOGGING_HANDLER)
	logger.removeHandler(_GENERIC_NO_LOG_HANDLER)
	if level is None:
		level = ipaaca.defaults.IPAACA_DEFAULT_LOGGING_LEVEL
	logger.setLevel(level=level)
class IpaacaArgumentParser(argparse.ArgumentParser):
	"""ArgumentParser that transparently adds the standard IPAACA/RSB options.

	The ipaaca-specific options are injected just before parsing and take
	effect as side effects (the Action classes below write directly into
	ipaaca.defaults); the helper attributes are afterwards stripped from the
	resulting Namespace so user code only sees its own arguments.
	"""

	class IpaacaDefaultChannelAction(argparse.Action):
		# Sets the library-wide default IPAACA channel name.
		def __call__(self, parser, namespace, values, option_string=None):
			ipaaca.defaults.IPAACA_DEFAULT_CHANNEL = values

	class IpaacaPayloadTypeAction(argparse.Action):
		# Sets the library-wide default IU payload type ('JSON' or 'STR').
		def __call__(self, parser, namespace, values, option_string=None):
			ipaaca.defaults.IPAACA_DEFAULT_IU_PAYLOAD_TYPE = values

	class IpaacaLoggingLevelAction(argparse.Action):
		# Turns on ipaaca library logging at the requested threshold.
		def __call__(self, parser, namespace, values, option_string=None):
			enable_logging(values)

	class IpaacaRSBLoggingLevelAction(argparse.Action):
		# Turns on RSB logging: swap the no-op handler for a printing one.
		def __call__(self, parser, namespace, values, option_string=None):
			rsb_logger = logging.getLogger('rsb')
			rsb_logger.addHandler(RSBLoggingHandler('RSB'))
			rsb_logger.removeHandler(_GENERIC_NO_LOG_HANDLER)
			rsb_logger.setLevel(level=values)

	class IpaacaRSBHost(argparse.Action):
		# Overrides the default RSB host.
		def __call__(self, parser, namespace, values, option_string=None):
			ipaaca.defaults.IPAACA_DEFAULT_RSB_HOST = values

	class IpaacaRSBPort(argparse.Action):
		# Overrides the default RSB port.
		# NOTE(review): the value is stored as a string (no type=int on the
		# option) -- confirm downstream consumers accept that.
		def __call__(self, parser, namespace, values, option_string=None):
			ipaaca.defaults.IPAACA_DEFAULT_RSB_PORT = values

	class IpaacaRSBTransport(argparse.Action):
		# Overrides the default RSB transport ('spread' or 'socket').
		def __call__(self, parser, namespace, values, option_string=None):
			ipaaca.defaults.IPAACA_DEFAULT_RSB_TRANSPORT = values

	class IpaacaRSBSocketServer(argparse.Action):
		# Overrides the socket-server role ('0', '1' or 'auto').
		def __call__(self, parser, namespace, values, option_string=None):
			ipaaca.defaults.IPAACA_DEFAULT_RSB_SOCKET_SERVER = values

	def __init__(self, prog=None, usage=None, description=None, epilog=None,
			parents=[], formatter_class=argparse.HelpFormatter,
			prefix_chars='-', fromfile_prefix_chars=None,
			argument_default=None, conflict_handler='error', add_help=True):
		"""Forward all standard ArgumentParser arguments unchanged."""
		super(IpaacaArgumentParser, self).__init__(prog=prog, usage=usage,
			description=description, epilog=epilog, parents=parents,
			formatter_class=formatter_class, prefix_chars=prefix_chars,
			fromfile_prefix_chars=fromfile_prefix_chars,
			argument_default=argument_default,
			conflict_handler=conflict_handler, add_help=add_help)

	def _add_ipaaca_lib_arguments(self):
		"""Add the IPAACA and RSB option groups (called from parse_args)."""
		# CMD-arguments for ipaaca
		ipaacalib_group = self.add_argument_group('IPAACA library arguments')
		ipaacalib_group.add_argument(
			'--ipaaca-payload-type',
			action=self.IpaacaPayloadTypeAction,
			choices=['JSON', 'STR'], # one of ipaaca.iu.IUPayloadTypes
			dest='_ipaaca_payload_type_',
			default='JSON',
			help="specify payload type (default: 'JSON')")
		ipaacalib_group.add_argument(
			'--ipaaca-default-channel',
			action=self.IpaacaDefaultChannelAction,
			default='default',
			metavar='NAME',
			dest='_ipaaca_default_channel_',
			help="specify default IPAACA channel name (default: 'default')")
		ipaacalib_group.add_argument(
			'--ipaaca-enable-logging',
			action=self.IpaacaLoggingLevelAction,
			choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
			dest='_ipaaca_logging_level_',
			help="enable IPAACA logging with threshold")
		# CMD-arguments for rsb
		rsblib_group = self.add_argument_group('RSB library arguments')
		rsblib_group.add_argument(
			'--rsb-enable-logging',
			action=self.IpaacaRSBLoggingLevelAction,
			choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
			dest='_ipaaca_rsb_enable_logging_',
			help="enable RSB logging with threshold")
		rsblib_group.add_argument(
			'--rsb-host',
			action=self.IpaacaRSBHost,
			default=None,
			dest='_ipaaca_rsb_set_host_',
			metavar='HOST',
			help="set RSB host")
		rsblib_group.add_argument(
			'--rsb-port',
			action=self.IpaacaRSBPort,
			default=None,
			dest='_ipaaca_rsb_set_port_',
			metavar='PORT',
			help="set RSB port")
		rsblib_group.add_argument(
			'--rsb-transport',
			action=self.IpaacaRSBTransport,
			default=None,
			dest='_ipaaca_rsb_set_transport_',
			choices=['spread', 'socket'],
			metavar='TRANSPORT',
			help="set RSB transport")
		rsblib_group.add_argument(
			'--rsb-socket-server',
			action=self.IpaacaRSBSocketServer,
			default=None,
			dest='_ipaaca_rsb_set_socket_server_',
			choices=['0', '1', 'auto'],
			metavar='server',
			help="act as server (only when --rsb-transport=socket)")

	def parse_args(self, args=None, namespace=None):
		"""Parse arguments, applying and then hiding the ipaaca options."""
		self._add_ipaaca_lib_arguments() # Add ipaaca-args just before parsing
		result = super(IpaacaArgumentParser, self).parse_args(args, namespace)
		# Delete ipaaca specific arguments (beginning with '_ipaaca' and
		# ending with an underscore) from the resulting Namespace object.
		for item in dir(result):
			if item.startswith('_ipaaca') and item.endswith('_'):
				delattr(result, item)
		return result
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
import threading
import time
import ipaaca.exception
__all__ = [
'Payload',
'PayloadItemDictProxy',
'PayloadItemListProxy',
]
_DEFAULT_PAYLOAD_UPDATE_TIMEOUT = 0.1
class Payload(dict):
	"""IU payload: a dict whose mutations are mirrored to the IU's peers.

	Every write (set/delete/merge) calls ``iu._modify_payload`` so the change
	is propagated. Using the payload as a context manager batches several
	changes into a single update message; the batch is guarded by
	``_batch_update_lock`` (acquired with a timeout via the condition below).
	"""

	def __init__(self, iu, writer_name=None, new_payload=None, omit_init_update_message=False, update_timeout=_DEFAULT_PAYLOAD_UPDATE_TIMEOUT):
		self.iu = iu
		_pl = {}
		# NOTE(review): 'str(k) if type(k) == str else k' is a no-op under
		# Python 3 -- presumably a py2 unicode-coercion leftover; confirm.
		for k, v in ({} if new_payload is None else new_payload).items():
			_pl[str(k) if type(k) == str else k] = str(v) if type(v) == str else v
		# NOTE omit_init_update_message is necessary to prevent checking for
		# exceptions and sending updates in the case where we just receive
		# a whole new payload from the remote side and overwrite it locally.
		for k, v in _pl.items():
			dict.__setitem__(self, k, v)
		if (not omit_init_update_message) and (self.iu.buffer is not None):
			self.iu._modify_payload(
				is_delta=False,
				new_items=_pl,
				keys_to_remove=[],
				writer_name=writer_name)
		self._update_on_every_change = True
		self._update_timeout = update_timeout
		self._collected_modifications = {}
		self._collected_removals = []
		self._batch_update_writer_name = None # name of remote buffer or None
		self._batch_update_lock = threading.RLock()
		self._batch_update_cond = threading.Condition(threading.RLock())

	def __getitem__(self, k):
		# Wrap nested containers in proxies so deep mutations also propagate.
		value = dict.__getitem__(self, k)
		if isinstance(value, dict):
			return PayloadItemDictProxy(value, self, k)
		elif isinstance(value, list):
			return PayloadItemListProxy(value, self, k)
		else:
			return value

	def __setitem__(self, k, v, writer_name=None):
		"""Set a key; sends an update immediately or collects it for the batch."""
		with self._batch_update_lock:
			k = str(k) if type(k) == str else k
			v = str(v) if type(v) == str else v
			if self._update_on_every_change:
				self.iu._modify_payload(
					is_delta=True,
					new_items={k:v},
					keys_to_remove=[],
					writer_name=writer_name)
			else: # Collect additions/modifications
				self._batch_update_writer_name = writer_name
				self._collected_modifications[k] = v
				# revoke deletion of item since a new version has been added
				self._collected_removals = [i for i in self._collected_removals if i!=k]
			return dict.__setitem__(self, k, v)

	def __delitem__(self, k, writer_name=None):
		"""Delete a key; sends an update immediately or collects it for the batch."""
		with self._batch_update_lock:
			k = str(k) if type(k) == str else k
			if self._update_on_every_change:
				self.iu._modify_payload(
					is_delta=True,
					new_items={},
					keys_to_remove=[k],
					writer_name=writer_name)
			else: # Collect additions/modifications
				self._batch_update_writer_name = writer_name
				self._collected_removals.append(k)
				# revoke older update of item since more recent changes take precedence
				if k in self._collected_modifications: del self._collected_modifications[k]
			return dict.__delitem__(self, k)

	# Context-manager based batch updates, not thread-safe (on remote updates)
	def __enter__(self):
		# Acquire the batch lock (with timeout) and switch to collecting mode.
		self._wait_batch_update_lock(self._update_timeout)
		self._update_on_every_change = False

	# Context-manager based batch updates, not thread-safe (on remote updates)
	def __exit__(self, type, value, traceback):
		# Flush all collected changes as one delta update, then reset state.
		self.iu._modify_payload(
			is_delta=True,
			new_items=self._collected_modifications,
			keys_to_remove=self._collected_removals,
			writer_name=self._batch_update_writer_name)
		self._collected_modifications = {}
		self._collected_removals = []
		self._update_on_every_change = True
		self._batch_update_writer_name = None
		self._batch_update_lock.release()

	def merge(self, payload, writer_name=None):
		"""Merge a dict into the payload, sent as a single delta update.

		NOTE(review): the coerced k/v computed in the loop are discarded;
		the *original* dict is passed to _modify_payload and dict.update.
		Harmless under py3 (the coercion is a no-op) but looks unintended.
		"""
		with self._batch_update_lock:
			for k, v in payload.items():
				k = str(k) if type(k) == str else k
				v = str(v) if type(v) == str else v
			self.iu._modify_payload(
				is_delta=True,
				new_items=payload,
				keys_to_remove=[],
				writer_name=writer_name)
			return dict.update(self, payload) # batch update

	def _remotely_enforced_setitem(self, k, v):
		"""Sets an item when requested remotely (no update echoed back)."""
		dict.__setitem__(self, k, v)

	def _remotely_enforced_delitem(self, k):
		"""Deletes an item when requested remotely (no update echoed back)."""
		if k in self: dict.__delitem__(self, k)

	def _wait_batch_update_lock(self, timeout):
		"""Acquire the batch lock within ``timeout`` seconds or raise.

		Raises IUPayloadLockTimeoutError when the lock stays contended.
		"""
		# wait lock with time-out http://stackoverflow.com/a/8393033
		with self._batch_update_cond:
			current_time = start_time = time.time()
			while current_time < start_time + timeout:
				if self._batch_update_lock.acquire(False):
					return True
				else:
					self._batch_update_cond.wait(timeout - current_time + start_time)
					current_time = time.time()
		raise ipaaca.exception.IUPayloadLockTimeoutError(self.iu)
class PayloadItemProxy(object):
	"""Write-through proxy for a container stored inside an IU payload.

	Mutating the proxied container re-assigns it under its payload key, so
	the owning Payload sends the corresponding update message.
	"""

	def __init__(self, content, payload, identifier_in_payload):
		self.payload = payload
		self.content = content
		self.identifier_in_payload = identifier_in_payload

	def _notify_payload(self):
		# Re-assign the (mutated) container so the payload propagates it.
		try:
			self.payload[self.identifier_in_payload] = self.content
		except ipaaca.exception.IUUpdateFailedError as e:
			# The update was rejected: ask the owning buffer to resend the
			# authoritative IU state, then re-raise for the caller.
			iu = self.payload.iu
			iu.buffer._request_remote_resend(iu)
			raise e

	def _create_proxy(self, obj, identifier_in_payload):
		# Wrap nested containers so that deep mutations also propagate.
		if isinstance(obj, dict):
			return PayloadItemDictProxy(obj, self.payload, identifier_in_payload)
		if isinstance(obj, list):
			return PayloadItemListProxy(obj, self.payload, identifier_in_payload)
		return obj

	def __setitem__(self, key, value):
		self.content[key] = value
		self._notify_payload()

	def __getitem__(self, key):
		return self._create_proxy(self.content[key], key)

	def __delitem__(self, key):
		del self.content[key]
		self._notify_payload()
class PayloadItemDictProxy(PayloadItemProxy, dict):
	"""Dict flavour of PayloadItemProxy: mutating dict methods notify the payload."""

	def __init__(self, content, payload, identifier_in_payload):
		dict.__init__(self, content)
		PayloadItemProxy.__init__(self, content, payload, identifier_in_payload)

	def clear(self):
		self.content.clear()
		self._notify_payload()

	def get(self, key, default=None):
		return self._create_proxy(self.content.get(key, default), key)

	def items(self):
		# Generator (formerly py2 iteritems): values are wrapped in proxies.
		for key in self.content:
			yield key, self._create_proxy(self.content[key], key)

	def values(self):
		# Generator (formerly py2 itervalues): values are wrapped in proxies.
		for key in self.content:
			yield self._create_proxy(self.content[key], key)

	def pop(self, key, *args):
		result = self.content.pop(key, *args)
		self._notify_payload()
		return result

	def popitem(self):
		result = self.content.popitem()
		self._notify_payload()
		return result

	def setdefault(self, key, default=None):
		# Only notify when the key was actually absent (i.e. it got inserted).
		must_notify = key not in self.content
		result = self.content.setdefault(key, default)
		if must_notify:
			self._notify_payload()
		return result

	def update(self, *args, **kwargs):
		self.content.update(*args, **kwargs)
		self._notify_payload()
class PayloadItemListProxy(PayloadItemProxy, list):
	"""List flavour of PayloadItemProxy: mutating list methods notify the payload."""

	def __init__(self, content, payload, identifier_in_payload):
		list.__init__(self, content)
		PayloadItemProxy.__init__(self, content, payload, identifier_in_payload)

	def __iter__(self):
		# Items are wrapped in proxies, keyed by their list index.
		for index, item in enumerate(self.content):
			yield self._create_proxy(item, index)

	def append(self, x):
		self.content.append(x)
		self._notify_payload()

	def extend(self, l):
		self.content.extend(l)
		self._notify_payload()

	def insert(self, i, x):
		self.content.insert(i, x)
		self._notify_payload()

	def remove(self, x):
		self.content.remove(x)
		self._notify_payload()

	def pop(self, *args, **kwargs):
		x = self.content.pop(*args, **kwargs)
		self._notify_payload()
		return x

	def sort(self, cmp=None, key=None, reverse=False):
		"""Sort in place, then notify the payload.

		``cmp`` is kept for backward compatibility with the old py2-style
		signature and converted via functools.cmp_to_key. The previous
		positional call self.content.sort(cmp, key, reverse) always raised
		TypeError on Python 3, where list.sort takes keyword-only arguments.
		"""
		if cmp is not None and key is None:
			import functools
			key = functools.cmp_to_key(cmp)
		self.content.sort(key=key, reverse=reverse)
		self._notify_payload()

	def reverse(self):
		self.content.reverse()
		self._notify_payload()
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
from .notifier import ComponentNotifier
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import datetime
import subprocess
import sys
import threading
import time
import traceback
import uuid
import ipaaca
import ipaaca.misc
import six
__all__ = [
'logger_send_ipaaca_logs',
'logger_set_log_filename',
'logger_set_module_name',
'logger_set_log_level',
'LOG_DEBUG',
'LOG_INFO',
'LOG_WARN',
'LOG_WARNING',
'LOG_ERROR',
]
# Numeric log thresholds; higher value == less output.
LogLevel = ipaaca.misc.enum(
	DEBUG = 0,
	INFO = 1,
	WARN = 2,
	ERROR = 3,
	SILENT = 4,
)

# Accepted (case-insensitive) level names for logger_set_log_level().
LOG_LEVEL_FROM_STRING_DICT = {
	'DEBUG': LogLevel.DEBUG,
	'INFO': LogLevel.INFO,
	'WARN': LogLevel.WARN,
	'WARNING': LogLevel.WARN,
	'ERROR': LogLevel.ERROR,
	'NONE': LogLevel.SILENT,
	'SILENT': LogLevel.SILENT,
}

# Mutable module state, guarded by LOGGER_LOCK.
CURRENT_LOG_LEVEL = LogLevel.DEBUG
LOGGER_LOCK = threading.RLock()

MODULE_NAME = sys.argv[0]

SEND_IPAACA_LOGS = True

# Lazily-created ipaaca.OutputBuffer('LogSender') for outgoing log IUs.
OUTPUT_BUFFER = None

STANDARD_LOG_FILE_EXTENSION = '.log'

# Modes for handling an already-existing log file.
LOG_MODES = ['append', 'timestamp', 'overwrite']
def logger_set_log_filename(filename, existing=None):
	"""Ask the remote logger component to open a new log file.

	existing: optional mode for handling an existing file; must be one of
	LOG_MODES ('append', 'timestamp', 'overwrite'), otherwise an Exception
	is raised.
	"""
	global OUTPUT_BUFFER
	if existing is not None and existing not in LOG_MODES:
		raise Exception('Invalid log mode {mode} given. '
				'Valid options are {options}.'.format(
					mode=existing,
					options=', '.join(LOG_MODES)))
	with LOGGER_LOCK:
		if OUTPUT_BUFFER is None:
			OUTPUT_BUFFER = ipaaca.OutputBuffer('LogSender')
		msg = ipaaca.Message('logcontrol')
		msg.payload = {
			'cmd': 'open_log_file',
			'filename': filename}
		if existing is not None:
			msg.payload['existing'] = existing
		OUTPUT_BUFFER.add(msg)
def logger_set_module_name(name):
	"""Set the module name reported in outgoing ipaaca log records."""
	global MODULE_NAME
	with LOGGER_LOCK:
		MODULE_NAME = name
def logger_send_ipaaca_logs(flag=True):
	"""Enable or disable forwarding of log records as ipaaca 'log' messages."""
	global SEND_IPAACA_LOGS
	with LOGGER_LOCK:
		SEND_IPAACA_LOGS = flag
def logger_set_log_level(level=LogLevel.DEBUG):
	"""Set the logging threshold.

	Accepts either a LogLevel int value or a (case-insensitive) level name
	string; unknown values leave the previous setting untouched.
	"""
	global CURRENT_LOG_LEVEL
	with LOGGER_LOCK:
		if level in LogLevel._values:
			CURRENT_LOG_LEVEL = level
			return
		if isinstance(level, six.string_types):
			normalized = level.upper()
			if normalized in LOG_LEVEL_FROM_STRING_DICT:
				CURRENT_LOG_LEVEL = LOG_LEVEL_FROM_STRING_DICT[normalized]
		# otherwise: leave previous setting untouched
def LOG_IPAACA(lvl, text, now=0.0, fn='???', thread='???'):
	"""Send a log record as an ipaaca 'log' message.

	lvl: level name string; text: message text; now: timestamp in seconds;
	fn: originating function name; thread: originating thread name.
	Lazily creates the module-wide OUTPUT_BUFFER on first use.
	"""
	global OUTPUT_BUFFER
	uid = str(uuid.uuid4())[0:8]
	with LOGGER_LOCK:
		if OUTPUT_BUFFER is None:
			OUTPUT_BUFFER = ipaaca.OutputBuffer('LogSender')
		msg = ipaaca.Message('log')
		msg.payload = {
			'module': MODULE_NAME,
			'function': fn,
			'level': lvl,
			'time':' %.3f'%now,
			'thread': thread,
			'uuid': uid,
			'text': text,}
		try:
			OUTPUT_BUFFER.add(msg)
		except Exception as e:
			# Bug fix: the message used to concatenate the literal string
			# ' str(e); ' instead of the actual exception text.
			LOG_ERROR('Caught an exception while logging via ipaaca. '
				+ str(e) + '; '
				+ traceback.format_exc())
def LOG_CONSOLE(lvlstr, msg, fn_markup='', msg_markup='', now=0.0, fn='???', thread='???'):
	"""Print a (possibly multi-line) log message to the console with ANSI markup."""
	lines = msg.split('\n') if isinstance(msg, six.string_types) else [msg]
	for line in lines:
		print(lvlstr+' '+thread+' '+fn_markup+fn+'\033[m'+' '+msg_markup+str(line)+'\033[m')
		# Blank out the function name for continuation lines.
		fn = ' '*len(fn)
def LOG_ERROR(msg, now=None):
	"""Log msg at ERROR level (console and, when enabled, ipaaca)."""
	if CURRENT_LOG_LEVEL > LogLevel.ERROR: return
	if now is None:
		now = time.time()
	# Derive "Class.method" of the caller from the parent stack frame.
	caller = sys._getframe(1)
	prefix = (caller.f_locals['self'].__class__.__name__+'.') if 'self' in caller.f_locals else ''
	fn = prefix + caller.f_code.co_name
	thread = threading.current_thread().getName()
	with LOGGER_LOCK:
		if SEND_IPAACA_LOGS: LOG_IPAACA('ERROR', msg, now=now, fn=fn, thread=thread)
		LOG_CONSOLE('\033[38;5;9;1;4m[ERROR]\033[m', msg, fn_markup='\033[38;5;203m', msg_markup='\033[38;5;9;1;4m', now=now, fn=fn, thread=thread)
def LOG_WARN(msg, now=None):
	"""Log msg at WARN level (console and, when enabled, ipaaca)."""
	if CURRENT_LOG_LEVEL > LogLevel.WARN: return
	if now is None:
		now = time.time()
	# Derive "Class.method" of the caller from the parent stack frame.
	caller = sys._getframe(1)
	prefix = (caller.f_locals['self'].__class__.__name__+'.') if 'self' in caller.f_locals else ''
	fn = prefix + caller.f_code.co_name
	thread = threading.current_thread().getName()
	with LOGGER_LOCK:
		if SEND_IPAACA_LOGS: LOG_IPAACA('WARN', msg, now=now, fn=fn, thread=thread)
		LOG_CONSOLE('\033[38;5;208;1m[WARN]\033[m ', msg, fn_markup='\033[38;5;214m', msg_markup='\033[38;5;208;1m', now=now, fn=fn, thread=thread)
LOG_WARNING = LOG_WARN
def LOG_INFO(msg, now=None):
	"""Log msg at INFO level (console and, when enabled, ipaaca)."""
	if CURRENT_LOG_LEVEL > LogLevel.INFO: return
	if now is None:
		now = time.time()
	# Derive "Class.method" of the caller from the parent stack frame.
	caller = sys._getframe(1)
	prefix = (caller.f_locals['self'].__class__.__name__+'.') if 'self' in caller.f_locals else ''
	fn = prefix + caller.f_code.co_name
	thread = threading.current_thread().getName()
	with LOGGER_LOCK:
		if SEND_IPAACA_LOGS: LOG_IPAACA('INFO', msg, now=now, fn=fn, thread=thread)
		LOG_CONSOLE('[INFO] ', msg, now=now, fn=fn, thread=thread)
def LOG_DEBUG(msg, now=None):
	"""Log msg at DEBUG level (console and, when enabled, ipaaca)."""
	if CURRENT_LOG_LEVEL > LogLevel.DEBUG: return
	if now is None:
		now = time.time()
	# Derive "Class.method" of the caller from the parent stack frame.
	caller = sys._getframe(1)
	prefix = (caller.f_locals['self'].__class__.__name__+'.') if 'self' in caller.f_locals else ''
	fn = prefix + caller.f_code.co_name
	thread = threading.current_thread().getName()
	with LOGGER_LOCK:
		if SEND_IPAACA_LOGS: LOG_IPAACA('DEBUG', msg, now=now, fn=fn, thread=thread)
		LOG_CONSOLE('\033[2m[DEBUG]\033[m', msg, fn_markup='\033[38;5;144m', msg_markup='\033[38;5;248m', now=now, fn=fn, thread=thread)
class LoggerComponent(object):
	"""Receives 'log' and 'logcontrol' IUs, printing records and optionally
	writing them to a log file.

	log_mode controls what happens to an existing file: 'append',
	'timestamp' (insert a timestamp into the name) or 'overwrite'.
	"""

	def __init__(self, filename, log_mode='append'):
		self.ib = ipaaca.InputBuffer('Logger', ['log', 'logcontrol'])
		self.ib.register_handler(self._logger_handle_iu_event)
		self.logfile = None
		self.log_mode = log_mode
		self.open_logfile(filename)
		if self.logfile is None:
			print('Logging to console only ...')
		print('Press Ctrl-C at any time to terminate the logger.')

	def open_logfile(self, filename):
		"""Open (or switch to) the given log file, honouring self.log_mode.

		A falsy/blank filename closes any current file instead. On failure
		the previous configuration is kept.
		"""
		with LOGGER_LOCK:
			if filename is None or filename.strip() == '':
				print('No log file name specified, not opening one.')
				self.close_logfile()
			else:
				new_logfile = None
				try:
					# create dir first
					# NOTE(review): filename is interpolated into a shell
					# command (shell=True) -- shell-injection risk for
					# untrusted filenames; consider os.makedirs instead.
					ret = subprocess.call(
						'mkdir -p `dirname ' + filename + '`',
						shell=True)
					# proceed with dir+file
					if self.log_mode == 'timestamp':
						t = datetime.datetime.now().strftime('%Y-%m-%d-%H%M%S')
						if filename.endswith(STANDARD_LOG_FILE_EXTENSION):
							# insert timestamp between filename and extension
							# (only for standard extension)
							filename = filename.replace(
								STANDARD_LOG_FILE_EXTENSION,
								'.' + t + STANDARD_LOG_FILE_EXTENSION)
						else: # prepend timestamp
							filename = t + '_' + filename
					append_if_exist = not (self.log_mode == 'overwrite' or
							self.log_mode == 'timestamp')
					new_logfile = open(filename, 'a' if append_if_exist else 'w')
					# If a file was already open, leave a hand-over record
					# in it before switching.
					if self.logfile is not None:
						text = u'Will now continue logging in log file ' + str(filename)
						uid = str(uuid.uuid4())[0:8]
						tim = time.time()
						record = {
							'uuid': uid,
							'time': tim,
							'level': u'INFO',
							'text': text,
							'module': u'logger',
							'function': u'LoggerComponent.open_logfile',
							'thread': '-',
							'logreceivedtime': tim}
						self.logfile.write(str(record)+'\n')
						self.logfile.close()
					self.logfile = new_logfile
					# NOTE(review): the format string below has no
					# {filename} placeholder, so the file name is never
					# shown -- looks like a lost placeholder; confirm.
					print('Logging to console and (unknown) ...'.format(filename=filename))
				except Exception as e:
					# NOTE(review): same missing-placeholder issue here.
					print('Failed to open logfile (unknown) for writing! Keeping previous configuration'.format(filename=filename))

	def close_logfile(self):
		"""Write a closing record to the current log file (if any) and close it."""
		if self.logfile is not None:
			text = u'Closing of log file requested.'
			uid = str(uuid.uuid4())[0:8]
			tim = str(time.time())
			record = {
				'uuid': uid,
				'time': tim,
				'level': u'INFO',
				'text': text,
				'module': u'logger',
				'function': u'LoggerComponent.close_logfile',
				'thread': u'-',
				'logreceivedtime': tim}
			self.logfile.write(str(record)+'\n')
			self.logfile.close()
			print('Closed current log file.')
			self.logfile = None

	def _logger_handle_iu_event(self, iu, event_type, own):
		"""Handle incoming IUs: print/write 'log' records, obey 'logcontrol'.

		Recognized logcontrol commands: 'open_log_file' (payload keys
		'filename' and optional 'existing' mode) and 'close_log_file'.
		"""
		received_time = "%.3f" % time.time()
		with LOGGER_LOCK:
			try:
				if iu.category == 'log':
					pl = iu.payload
					# Every field is optional; fall back to placeholders.
					message = pl['text'] if 'text' in pl else '(No message.)'
					uid = '????????' if 'uuid' not in pl else pl['uuid']
					tim = '???' if 'time' not in pl else pl['time']
					module = '???' if 'module' not in pl else pl['module']
					function = '???' if 'function' not in pl else pl['function']
					thread = '???' if 'thread' not in pl else pl['thread']
					level = 'INFO' if 'level' not in pl else pl['level']
					# dump to console
					if level=='WARN':
						level='WARNING'
					if level not in ['DEBUG','INFO','WARNING','ERROR']:
						level = 'INFO'
					try:
						print('%s %-8s {%s} {%s} {%s} %s'%(tim, ('['+level+']'), thread, module, function, message))
					except:
						print('Failed to format a string for printing!')
					if self.logfile is not None:
						try:
							record = {
								'uuid': uid,
								'time': tim,
								'level': level,
								'text': message,
								'module': module,
								'function': function,
								'thread': thread,
								'logreceivedtime': received_time}
							self.logfile.write(str(record) + '\n')
						except:
							print('Failed to write to logfile!')
				elif iu.category == 'logcontrol':
					cmd = iu.payload['cmd'] if 'cmd' in iu.payload else 'undef'
					if cmd == 'open_log_file':
						filename = iu.payload['filename'] if 'filename' in iu.payload else ''
						if 'existing' in iu.payload:
							log_mode_ = iu.payload['existing'].lower()
							if log_mode_ not in LOG_MODES:
								LOG_WARN(u'Value of "existing" should be "append", "timestamp", or "overwrite", continuing with mode {mode}'.format(mode=self.log_mode))
							else:
								self.log_mode = log_mode_
						self.open_logfile(filename)
					elif cmd == 'close_log_file':
						self.close_logfile()
					else:
						LOG_WARN(u'Received unknown logcontrol command: '+str(cmd))
			except Exception as e:
				print('Exception while logging!') # TODO write to file as well?
				print(u'	'+str(traceback.format_exc()))
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import os
import threading
import ipaaca.buffer
import ipaaca.iu
import ipaaca.misc
import ipaaca.util.timesync
__all__ = [
'NotificationState',
'ComponentError',
'ComponentNotifier'
]
# Lifecycle states announced in componentNotify messages.
NotificationState = ipaaca.misc.enum(
	NEW = 'new',
	OLD = 'old',
	DOWN = 'down'
)
class ComponentError(Exception):
	"""Raised for invalid ComponentNotifier lifecycle operations."""
	def __init__(self, msg):
		super(ComponentError, self).__init__(msg)
class ComponentNotifier(object):
NOTIFY_CATEGORY = "componentNotify"
CONTROL_CATEGORY = "componentControl"
SEND_CATEGORIES = "send_categories"
RECEIVE_CATEGORIES = "recv_categories"
CMD = "cmd"
STATE = "state"
NAME = "name"
WHO = "who" # list of names (or empty)
FUNCTION = "function"
PID = "pid"
CMD_REPORT = "report"
def __init__(self, component_name, component_function, send_categories, receive_categories, out_buffer=None, in_buffer=None):
self.component_name = component_name
self.component_function = component_function
self.send_categories = frozenset(send_categories)
self.receive_categories = frozenset(receive_categories)
self.in_buffer = in_buffer if in_buffer is not None else ipaaca.buffer.InputBuffer(component_name + 'Notifier')
self.out_buffer = out_buffer if out_buffer is not None else ipaaca.buffer.OutputBuffer(component_name + 'Notifier')
self.terminated = False
self.initialized = False
self.notification_handlers = []
self.initialize_lock = threading.Lock()
self.notification_handler_lock = threading.Lock()
self.submit_lock = threading.Lock()
# clock sync code, sync slave/master pair will be installed when launched
self.timesync_slave = None
self.timesync_master = None
self.timesync_master_handlers = []
self.timesync_slave_handlers = []
def _submit_notify(self, is_new):
with self.submit_lock:
notify_iu = ipaaca.iu.Message(ComponentNotifier.NOTIFY_CATEGORY)
notify_iu.payload = {
ComponentNotifier.NAME: self.component_name,
ComponentNotifier.FUNCTION: self.component_function,
ComponentNotifier.SEND_CATEGORIES: ",".join(self.send_categories),
ComponentNotifier.RECEIVE_CATEGORIES: ",".join(self.receive_categories),
ComponentNotifier.STATE: NotificationState.NEW if is_new else NotificationState.OLD,
}
self.out_buffer.add(notify_iu)
def terminate(self):
with self.submit_lock:
if self.terminated: return
self.terminated = True
notify_iu = ipaaca.iu.Message(ComponentNotifier.NOTIFY_CATEGORY)
notify_iu.payload = {
ComponentNotifier.NAME: self.component_name,
ComponentNotifier.FUNCTION: self.component_function,
ComponentNotifier.SEND_CATEGORIES: ",".join(self.send_categories),
ComponentNotifier.RECEIVE_CATEGORIES: ",".join(self.receive_categories),
ComponentNotifier.STATE: NotificationState.DOWN,
}
self.out_buffer.add(notify_iu)
def _handle_iu_event(self, iu, event_type, local):
if iu.category == ComponentNotifier.NOTIFY_CATEGORY:
if iu.payload[ComponentNotifier.NAME] == self.component_name:
return
with self.notification_handler_lock:
for h in self.notification_handlers:
h(iu, event_type, local)
if iu.payload[ComponentNotifier.STATE] == "new":
#print("submitting")
self._submit_notify(False)
elif iu.category == ComponentNotifier.CONTROL_CATEGORY:
cmd = iu.payload[ComponentNotifier.CMD]
if cmd=='report':
# Request to report (by component controller)
who = iu.payload[ComponentNotifier.WHO]
# If we are named specifically or it's a broadcast
if len(who)==0 or self.component_name in who:
self._submit_notify(False)
def add_notification_handler(self, handler):
with self.notification_handler_lock:
self.notification_handlers.append(handler)
def launch_timesync_slave_handlers(self, master, slave, latency, offset):
for h in self.timesync_slave_handlers:
h(master, slave, latency, offset)
def launch_timesync_master_handlers(self, master, slave, latency, offset):
for h in self.timesync_master_handlers:
h(master, slave, latency, offset)
def add_timesync_slave_handler(self, handler):
self.timesync_slave_handlers.append(handler)
def add_timesync_master_handler(self, handler):
self.timesync_master_handlers.append(handler)
def send_master_timesync(self):
#if len(self.timesync_master_handlers)==0:
# print('Warning: Sending a master timesync without a registered result callback.')
self.timesync_master.send_master_timesync()
def initialize(self):
with self.initialize_lock:
if self.terminated:
raise ComponentError('Attempted to reinitialize component '+component_name+' after termination')
if (not self.initialized):
self.timesync_slave = ipaaca.util.timesync.TimesyncSlave(component_name=self.component_name, timing_handler=self.launch_timesync_slave_handlers)
self.timesync_master = ipaaca.util.timesync.TimesyncMaster(component_name=self.component_name, timing_handler=self.launch_timesync_master_handlers)
self.in_buffer.register_handler(self._handle_iu_event, ipaaca.iu.IUEventType.MESSAGE, [ComponentNotifier.NOTIFY_CATEGORY, ComponentNotifier.CONTROL_CATEGORY])
self._submit_notify(True)
self.initialized = True
def __enter__(self):
self.initialize()
return self
def __exit__(self, t, v, tb):
self.terminate()
return self
# -*- coding: utf-8 -*-
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Social Cognitive Systems Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
from __future__ import division, print_function
import threading
import time
import ipaaca.buffer
import ipaaca.iu
class TimesyncMaster(object):
	"""Master side of the IPAACA clock-synchronization handshake.

	The master broadcasts a 'timesyncRequest' Message (stage 0) and then
	converses with each replying slave on a shared 'timesyncReply' IU:
	a stage-1 reply (slave_t1) is answered with master_t2/stage 2, and the
	stage-3 reply (slave_t2) lets the master compute the averaged round-trip
	latency and the slave's clock offset, which are written back as stage 4
	and reported through timing_handler (or printed when no handler is set).
	"""
	def __init__(self, component_name=None, timing_handler=None, debug_offset=0):
		self.ob = ipaaca.buffer.OutputBuffer(('' if component_name is None else component_name)+'TimesyncMaster')
		self.ib = ipaaca.buffer.InputBuffer(('' if component_name is None else component_name)+'TimesyncMaster', ['timesyncReply'])
		# component name to report (None => use buffer name)
		self.component_name = component_name if component_name is not None else self.ob.unique_name
		self.ib.register_handler(self.handle_timesync_master)
		# master_t1 is identical for all slaves; the per-slave data below is
		# keyed by the slave's reported component name
		self.master_t1 = None
		self.slave_t1s = {}
		self.master_t2s = {}
		self.slave_t2s = {}
		self.latencies = {}
		self.time_offsets = {}
		# artificial clock shift added to get_time(), for testing
		self.debug_offset = debug_offset
		# callback receiving (master, slave, latency, offset); None => print
		self.timing_handler = timing_handler
	def set_timing_handler(self, timing_handler):
		"""Replace the callback that receives (master, slave, latency, offset)."""
		self.timing_handler = timing_handler
	def send_master_timesync(self):
		"""Broadcast a stage-0 timesyncRequest, starting a new sync round."""
		iu = ipaaca.iu.Message('timesyncRequest')
		self.master_t1 = self.get_time()
		iu.payload = {
			'stage':'0',
			'master_t1':str(self.master_t1),
			'master':self.component_name,
		}
		self.ob.add(iu)
	def handle_timesync_master(self, iu, event_type, own):
		"""Process stage-1 and stage-3 slave replies on the timesyncReply IU."""
		master = iu.payload['master']
		# Only react to other components' updates of a round we initiated.
		if not own and master == self.component_name:
			# CONSISTENCY FIX: reference IUEventType through the explicitly
			# imported ipaaca.iu module (this file imports ipaaca.buffer and
			# ipaaca.iu, not the top-level re-exports).
			if event_type == ipaaca.iu.IUEventType.ADDED or event_type == ipaaca.iu.IUEventType.UPDATED:
				# (a redundant re-check of 'self.component_name == master',
				# already guaranteed by the outer condition, was removed)
				slave = iu.payload['slave']
				stage = iu.payload['stage']
				if stage=='1':
					# initial reply by slave: record t1 stamps, answer stage 2
					t1 = iu.payload['slave_t1']
					self.slave_t1s[slave] = float(t1)
					t2 = self.master_t2s[slave] = self.get_time()
					iu.payload.merge({'master_t2': str(t2), 'stage':'2'})
					# provisional latency: first round-trip (request -> reply)
					latency1 = t2 - self.master_t1
					self.latencies[slave] = latency1
				elif stage=='3':
					# second reply by slave: finalize latency and clock offset
					t2 = iu.payload['slave_t2']
					self.slave_t2s[slave] = float(t2)
					t_final = self.get_time()
					latency1 = self.latencies[slave]
					latency2 = t_final - self.master_t2s[slave]
					latency = self.latencies[slave] = (latency1+latency2)/2.0
					offset1 = (self.slave_t1s[slave]-self.master_t1)-latency/2.0
					offset2 = (self.slave_t2s[slave]-self.master_t2s[slave])-latency/2.0
					offset = (offset1+offset2)/2.0
					iu.payload.merge({'stage':'4', 'latency': str(latency), 'offset':str(offset)})
					if self.timing_handler is None:
						print('Determined timing of timesync slave '+slave)
						print(' Avg round-trip latency: %.3f s'%latency)
						print(' Offset of their clock: %.3f s'%offset)
					else:
						self.timing_handler(self.component_name, slave, latency, offset)
				else:
					# other stages are handled by the time slave side
					pass
	def get_time(self):
		"""Current wall-clock time plus the artificial debug offset."""
		return time.time() + self.debug_offset
class TimesyncSlave(object):
	"""Slave side of the IPAACA clock-synchronization handshake.

	Listens for 'timesyncRequest' Messages from a master (stage 0), replies
	with a 'timesyncReply' IU carrying its own timestamp (stage 1), answers
	the master's stage-2 update with a second timestamp (stage 3), and finally
	receives the computed latency/offset in stage 4, which is delivered to
	timing_handler (or printed when no handler is set).
	"""
	def __init__(self, component_name=None, timing_handler=None, debug_offset=0):
		self.ob = ipaaca.buffer.OutputBuffer(('' if component_name is None else component_name)+'TimesyncSlave')
		self.ib = ipaaca.buffer.InputBuffer(('' if component_name is None else component_name)+'TimesyncSlave', ['timesyncRequest'])
		# component name to report (None => use buffer name)
		self.component_name = component_name if component_name is not None else self.ib.unique_name
		# NOTE: the handler is registered on BOTH buffers — stage 2/4 updates
		# arrive as remote modifications of the reply IU we published on ob.
		self.ob.register_handler(self.handle_timesync_slave)
		self.ib.register_handler(self.handle_timesync_slave)
		#self.master_t1 = None
		#self.master_t2 = None
		#self.master = None
		self.latency = None
		# artificial clock shift added to get_time(), for testing
		self.debug_offset = debug_offset
		# callback receiving (master, slave, latency, offset); None => print
		self.timing_handler = timing_handler
	def set_timing_handler(self, timing_handler):
		"""Replace the callback that receives (master, slave, latency, offset)."""
		self.timing_handler = timing_handler
	def handle_timesync_slave(self, iu, event_type, own):
		"""Advance the handshake: answer stage 0 and 2, consume stage 4."""
		master = iu.payload['master']
		stage = iu.payload['stage']
		# Ignore rounds we initiated ourselves (when acting as master too).
		if self.component_name != master:
			if not own and stage=='0':
				# reply only to IUs from others
				# initial reply to master: publish a fresh reply IU (stage 1)
				myiu = ipaaca.iu.IU('timesyncReply')
				# TODO: add grounded_in link too?
				t1 = self.get_time()
				myiu.payload = iu.payload
				myiu.payload['slave'] = self.component_name
				myiu.payload['slave_t1'] = str(t1)
				myiu.payload['stage'] = '1'
				self.ob.add(myiu)
			elif iu.payload['slave'] == self.component_name:
				# updates of our own reply IU, addressed to us by the master
				if stage=='2':
					# second timestamp requested: answer with stage 3
					t2 = self.get_time()
					iu.payload.merge({
						'slave_t2':str(t2),
						'stage':'3',
					})
				elif stage=='4':
					# final result computed by the master
					latency = float(iu.payload['latency'])
					offset = float(iu.payload['offset'])
					if self.timing_handler is None:
						print('Timesync master '+master+' determined our timing: ')
						print(' Avg round-trip latency: %.3f s'%latency)
						print(' Offset of our clock: %.3f s'%offset)
					else:
						self.timing_handler(master, self.component_name, latency, offset)
	def get_time(self):
		"""Current wall-clock time plus the artificial debug offset."""
		return time.time() + self.debug_offset
#!/usr/bin/env python
# This file is part of IPAACA, the
# "Incremental Processing Architecture
# for Artificial Conversational Agents".
#
# Copyright (c) 2009-2022 Sociable Agents Group
# CITEC, Bielefeld University
#
# http://opensource.cit-ec.de/projects/ipaaca/
# http://purl.org/net/ipaaca
#
# This file may be licensed under the terms of of the
# GNU Lesser General Public License Version 3 (the ``LGPL''),
# or (at your option) any later version.
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the LGPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the LGPL along with this
# program. If not, go to http://www.gnu.org/licenses/lgpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# The development of this software was supported by the
# Excellence Cluster EXC 277 Cognitive Interaction Technology.
# The Excellence Cluster EXC 277 is a grant of the Deutsche
# Forschungsgemeinschaft (DFG) in the context of the German
# Excellence Initiative.
import time
import logging
import ipaaca
# Placeholder for the IU this example script writes to; none created yet.
iu_to_write = None
......@@ -38,8 +68,8 @@ while True:
else:
iu.payload = {'a': 'reset'}
except ipaaca.IUUpdateFailedError, e:
ipaaca.logger.warning("Payload update failed (IU changed in the mean time)")
except ipaaca.IUUpdateFailedError as e:
print("Payload update failed (IU changed in the mean time)")
time.sleep(0.1)
exit(0)