diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java index 06e2afb4066..955c067f515 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java @@ -1,3 +1,7 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + package docs.persistence; import scala.Option; diff --git a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java index 5019ed42b12..2b32a787467 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java @@ -1,3 +1,7 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + package docs.persistence; //#plugin-imports @@ -10,132 +14,45 @@ //#plugin-imports public class PersistencePluginDocTest { - static Object o1 = new Object() { - abstract class MySnapshotStore extends SnapshotStore { - //#snapshot-store-plugin-api - /** - * Plugin Java API. - * - * Asynchronously loads a snapshot. - * - * @param processorId processor id. - * @param criteria selection criteria for loading. - */ - public abstract Future> doLoadAsync(String processorId, SnapshotSelectionCriteria criteria); - - /** - * Plugin Java API. - * - * Asynchronously saves a snapshot. - * - * @param metadata snapshot metadata. - * @param snapshot snapshot. - */ - public abstract Future doSaveAsync(SnapshotMetadata metadata, Object snapshot); - - /** - * Plugin Java API. - * - * Called after successful saving of a snapshot. - * - * @param metadata snapshot metadata. - */ - public abstract void onSaved(SnapshotMetadata metadata) throws Exception; - - /** - * Plugin Java API. - * - * Deletes the snapshot identified by `metadata`. - * - * @param metadata snapshot metadata. - */ - public abstract void doDelete(SnapshotMetadata metadata) throws Exception; - //#snapshot-store-plugin-api + class MySnapshotStore extends SnapshotStore { + @Override + public Future> doLoadAsync(String processorId, SnapshotSelectionCriteria criteria) { + return null; } - abstract class MySyncWriteJournal extends SyncWriteJournal { - //#sync-write-plugin-api - /** - * Plugin Java API. - * - * Synchronously writes a `persistent` message to the journal. - */ - @Override - public abstract void doWrite(PersistentImpl persistent) throws Exception; + @Override + public Future doSaveAsync(SnapshotMetadata metadata, Object snapshot) { + return null; + } - /** - * Plugin Java API. - * - * Synchronously marks a `persistent` message as deleted. - */ - @Override - public abstract void doDelete(PersistentImpl persistent) throws Exception; + @Override + public void onSaved(SnapshotMetadata metadata) throws Exception { + } - /** - * Plugin Java API. - * - * Synchronously writes a delivery confirmation to the journal. - */ - @Override - public abstract void doConfirm(String processorId, long sequenceNr, String channelId) throws Exception; - //#sync-write-plugin-api + @Override + public void doDelete(SnapshotMetadata metadata) throws Exception { } + } - abstract class MyAsyncWriteJournal extends AsyncWriteJournal { - //#async-write-plugin-api - /** - * Plugin Java API. - * - * Asynchronously writes a `persistent` message to the journal. 
- */ - @Override - public abstract Future doWriteAsync(PersistentImpl persistent); + class MyAsyncJournal extends AsyncWriteJournal { + @Override + public Future doReplayAsync(String processorId, long fromSequenceNr, long toSequenceNr, Procedure replayCallback) { + return null; + } - /** - * Plugin Java API. - * - * Asynchronously marks a `persistent` message as deleted. - */ - @Override - public abstract Future doDeleteAsync(PersistentImpl persistent); + @Override + public Future doWriteAsync(PersistentImpl persistent) { + return null; + } - /** - * Plugin Java API. - * - * Asynchronously writes a delivery confirmation to the journal. - */ - @Override - public abstract Future doConfirmAsync(String processorId, long sequenceNr, String channelId); - //#async-write-plugin-api + @Override + public Future doDeleteAsync(PersistentImpl persistent) { + return null; } - abstract class MyAsyncReplay extends AsyncReplay { - //#async-replay-plugin-api - /** - * Plugin Java API. - * - * Asynchronously replays persistent messages. Implementations replay a message - * by calling `replayCallback`. The returned future must be completed when all - * messages (matching the sequence number bounds) have been replayed. The future - * `Long` value must be the highest stored sequence number in the journal for the - * specified processor. The future must be completed with a failure if any of - * the persistent messages could not be replayed. - * - * The `replayCallback` must also be called with messages that have been marked - * as deleted. In this case a replayed message's `deleted` field must be set to - * `true`. - * - * The channel ids of delivery confirmations that are available for a replayed - * message must be contained in that message's `confirms` sequence. - * - * @param processorId processor id. - * @param fromSequenceNr sequence number where replay should start. - * @param toSequenceNr sequence number where replay should end (inclusive). - * @param replayCallback called to replay a single message. - */ - @Override - public abstract Future doReplayAsync(String processorId, long fromSequenceNr, long toSequenceNr, Procedure replayCallback); - //#async-replay-plugin-api + @Override + public Future doConfirmAsync(String processorId, long sequenceNr, String channelId) { + return null; } - }; + } } diff --git a/akka-docs/rst/java/persistence.rst b/akka-docs/rst/java/persistence.rst index d0ee60d1620..8c7365bd446 100644 --- a/akka-docs/rst/java/persistence.rst +++ b/akka-docs/rst/java/persistence.rst @@ -4,9 +4,21 @@ Persistence ########### -This section describes an early access version of the Akka persistence module. Akka persistence is heavily inspired -by the `eventsourced`_ library. It follows the same concepts and architecture of `eventsourced`_ but significantly -differs on API and implementation level. +Akka persistence enables stateful actors to persist their internal state so that it can be recovered when an actor +is started, restarted by a supervisor or migrated in a cluster. It also allows stateful actors to recover from JVM +crashes, for example. The key concept behind Akka persistence is that only changes to an actor's internal state are +persisted but never its current state directly (except for optional snapshots). These changes are only ever appended +to storage, nothing is ever mutated, which allows for very high transaction rates and efficient replication. Stateful +actors are recovered by replaying stored changes to these actors from which they can rebuild internal state. 
This can
+be either the full history of changes or a replay starting from a snapshot of internal actor state, which can dramatically
+reduce recovery times.
+
+Storage backends for state changes and snapshots are pluggable in Akka persistence. Currently, these are written to
+the local filesystem. Distributed and replicated storage, with the possibility of scaling writes, will be available
+soon.
+
+Akka persistence is inspired by the `eventsourced`_ library. It follows the same concepts and architecture as
+`eventsourced`_ but differs significantly at the API and implementation level.

.. warning::

@@ -31,13 +43,20 @@ Akka persistence is a separate jar file. Make sure that you have the following d

Architecture
============

-* *Processor*: A processor is a persistent actor. Messages sent to a processor are written to a journal before
-  its ``onReceive`` method is called. When a processor is started or restarted, journaled messages are replayed
+* *Processor*: A processor is a persistent, stateful actor. Messages sent to a processor are written to a journal
+  before its ``onReceive`` method is called. When a processor is started or restarted, journaled messages are replayed
  to that processor, so that it can recover internal state from these messages.

* *Channel*: Channels are used by processors to communicate with other actors. They prevent that replayed messages
  are redundantly delivered to these actors.

+* *Journal*: A journal stores the sequence of messages sent to a processor. An application can control which messages
+  are stored and which are received by the processor without being journaled. The storage backend of a journal is
+  pluggable.
+
+* *Snapshot store*: A snapshot store persists snapshots of a processor's internal state. Snapshots are used for
+  optimizing recovery times. The storage backend of a snapshot store is pluggable.
+
Use cases
=========

@@ -69,10 +88,11 @@ A processor can be implemented by extending the abstract ``UntypedProcessor`` cl

Processors only write messages of type ``Persistent`` to the journal, others are received without being persisted.
When a processor's ``onReceive`` method is called with a ``Persistent`` message it can safely assume that this message
-has been successfully written to the journal. If a journal fails to write a ``Persistent`` message then the processor
-receives a ``PersistenceFailure`` message instead of a ``Persistent`` message. In this case, a processor may want to
-inform the sender about the failure, so that the sender can re-send the message, if needed, under the assumption that
-the journal recovered from a temporary failure.
+has been successfully written to the journal. If a journal fails to write a ``Persistent`` message then the processor
+is stopped, by default. If an application wants a processor to continue running after persistence failures, it must
+handle ``PersistenceFailure`` messages. In this case, a processor may want to inform the sender about the failure,
+so that the sender can re-send the message, if needed, under the assumption that the journal recovered from a
+temporary failure.

An ``UntypedProcessor`` itself is an ``Actor`` and can therefore be instantiated with ``actorOf``.

@@ -268,16 +288,16 @@ A journal plugin either extends ``SyncWriteJournal`` or ``AsyncWriteJournal``.
actor that should be extended when the storage backend API only supports synchronous, blocking writes. The methods to
be implemented in this case are:

-.. includecode:: code/docs/persistence/PersistencePluginDocTest.java#sync-write-plugin-api
+..
includecode:: ../../../akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java#sync-write-plugin-api ``AsyncWriteJournal`` is an actor that should be extended if the storage backend API supports asynchronous, non-blocking writes. The methods to be implemented in that case are: -.. includecode:: code/docs/persistence/PersistencePluginDocTest.java#async-write-plugin-api +.. includecode:: ../../../akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java#async-write-plugin-api Message replays are always asynchronous, therefore, any journal plugin must implement: -.. includecode:: code/docs/persistence/PersistencePluginDocTest.java#async-replay-plugin-api +.. includecode:: ../../../akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java#async-replay-plugin-api A journal plugin can be activated with the following minimal configuration: @@ -292,7 +312,7 @@ Snapshot store plugin API A snapshot store plugin must extend the ``SnapshotStore`` actor and implement the following methods: -.. includecode:: code/docs/persistence/PersistencePluginDocTest.java#snapshot-store-plugin-api +.. includecode:: ../../../akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java#snapshot-store-plugin-api A snapshot store plugin can be activated with the following minimal configuration: @@ -301,10 +321,25 @@ A snapshot store plugin can be activated with the following minimal configuratio The specified plugin ``class`` must have a no-arg constructor. The ``plugin-dispatcher`` is the dispatcher used for the plugin actor. If not specified, it defaults to ``akka.persistence.dispatchers.default-plugin-dispatcher``. +Custom serialization +==================== + +Serialization of snapshots and payloads of ``Persistent`` messages is configurable with Akka's +:ref:`serialization-java` infrastructure. For example, if an application wants to serialize + +* payloads of type ``MyPayload`` with a custom ``MyPayloadSerializer`` and +* snapshots of type ``MySnapshot`` with a custom ``MySnapshotSerializer`` + +it must add + +.. includecode:: ../scala/code/docs/persistence/PersistenceSerializerDocSpec.scala#custom-serializer-config + +to the application configuration. If not specified, a default serializer is used, which is the ``JavaSerializer`` +in this example. + Upcoming features ================= * Reliable channels -* Custom serialization of messages and snapshots * Extended deletion of messages and snapshots * ... diff --git a/akka-docs/rst/java/serialization.rst b/akka-docs/rst/java/serialization.rst index 596197f944c..b04b8bfc50e 100644 --- a/akka-docs/rst/java/serialization.rst +++ b/akka-docs/rst/java/serialization.rst @@ -166,19 +166,7 @@ There is also a default remote address which is the one used by cluster support Deep serialization of Actors ---------------------------- -The current recommended approach to do deep serialization of internal actor state is to use Event Sourcing, -for more reading on the topic, see these examples: - -`Martin Krasser on EventSourcing Part1 `_ - -`Martin Krasser on EventSourcing Part2 `_ - - -.. note:: - - Built-in API support for persisting Actors will come in a later release, see the roadmap for more info: - - `Akka 2.0 roadmap `_ +The recommended approach to do deep serialization of internal actor state is to use Akka :ref:`persistence-java`. 
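Custom serializers for persisted payloads are wired in purely through configuration, as described in the persistence
documentation's Custom serialization section. As a rough sketch of how such a binding resolves (written in Scala and
reusing the ``MyPayload`` and ``MyPayloadSerializer`` names from that section), the bound serializer can be looked up
through the serialization extension::

    import akka.actor.ActorSystem
    import akka.serialization.SerializationExtension
    import com.typesafe.config.ConfigFactory

    val config = ConfigFactory.parseString("""
      akka.actor {
        serializers { my-payload = "docs.persistence.MyPayloadSerializer" }
        serialization-bindings { "docs.persistence.MyPayload" = my-payload }
      }
    """)
    val system = ActorSystem("custom-serialization", config)
    // the bindings above make the extension resolve MyPayload to MyPayloadSerializer
    val serializer = SerializationExtension(system).findSerializerFor(new MyPayload)
    system.shutdown()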
A Word About Java Serialization =============================== diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala index 29497dfbf84..a54ccdaac6e 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala @@ -1,3 +1,7 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + package docs.persistence import akka.actor.ActorSystem diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala index 19901b8102e..0645619aa6b 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala @@ -1,3 +1,7 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + package docs.persistence //#plugin-imports diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala new file mode 100644 index 00000000000..f8d036f5f20 --- /dev/null +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala @@ -0,0 +1,50 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + +package docs.persistence + +import com.typesafe.config._ + +import org.scalatest.WordSpec + +import akka.actor.ActorSystem +import akka.serialization.{ Serializer, SerializationExtension } + +class PersistenceSerializerDocSpec extends WordSpec { + + val customSerializerConfig = + """ + //#custom-serializer-config + akka.actor { + serializers { + my-payload = "docs.persistence.MyPayloadSerializer" + my-snapshot = "docs.persistence.MySnapshotSerializer" + } + serialization-bindings { + "docs.persistence.MyPayload" = my-payload + "docs.persistence.MySnapshot" = my-snapshot + } + } + //#custom-serializer-config + """.stripMargin + + SerializationExtension(ActorSystem("doc", ConfigFactory.parseString(customSerializerConfig))) +} + +class MyPayload +class MySnapshot + +class MyPayloadSerializer extends Serializer { + def identifier: Int = 77124 + def includeManifest: Boolean = false + def toBinary(o: AnyRef): Array[Byte] = ??? + def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = ??? +} + +class MySnapshotSerializer extends Serializer { + def identifier: Int = 77125 + def includeManifest: Boolean = false + def toBinary(o: AnyRef): Array[Byte] = ??? + def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = ??? +} diff --git a/akka-docs/rst/scala/persistence.rst b/akka-docs/rst/scala/persistence.rst index fc3f7e280d9..7dad84328eb 100644 --- a/akka-docs/rst/scala/persistence.rst +++ b/akka-docs/rst/scala/persistence.rst @@ -4,9 +4,21 @@ Persistence ########### -This section describes an early access version of the Akka persistence module. Akka persistence is heavily inspired -by the `eventsourced`_ library. It follows the same concepts and architecture of `eventsourced`_ but significantly -differs on API and implementation level. +Akka persistence enables stateful actors to persist their internal state so that it can be recovered when an actor +is started, restarted by a supervisor or migrated in a cluster. It also allows stateful actors to recover from JVM +crashes, for example. 
The key concept behind Akka persistence is that only changes to an actor's internal state are
+persisted but never its current state directly (except for optional snapshots). These changes are only ever appended
+to storage, nothing is ever mutated, which allows for very high transaction rates and efficient replication. Stateful
+actors are recovered by replaying stored changes to these actors from which they can rebuild internal state. This can
+be either the full history of changes or a replay starting from a snapshot of internal actor state, which can dramatically
+reduce recovery times.
+
+Storage backends for state changes and snapshots are pluggable in Akka persistence. Currently, these are written to
+the local filesystem. Distributed and replicated storage, with the possibility of scaling writes, will be available
+soon.
+
+Akka persistence is inspired by the `eventsourced`_ library. It follows the same concepts and architecture as
+`eventsourced`_ but differs significantly at the API and implementation level.

.. warning::

@@ -27,13 +39,20 @@ Akka persistence is a separate jar file. Make sure that you have the following d

Architecture
============

-* *Processor*: A processor is a persistent actor. Messages sent to a processor are written to a journal before
-  its ``receive`` method is called. When a processor is started or restarted, journaled messages are replayed
+* *Processor*: A processor is a persistent, stateful actor. Messages sent to a processor are written to a journal
+  before its ``receive`` method is called. When a processor is started or restarted, journaled messages are replayed
  to that processor, so that it can recover internal state from these messages.

* *Channel*: Channels are used by processors to communicate with other actors. They prevent that replayed messages
  are redundantly delivered to these actors.

+* *Journal*: A journal stores the sequence of messages sent to a processor. An application can control which messages
+  are stored and which are received by the processor without being journaled. The storage backend of a journal is
+  pluggable.
+
+* *Snapshot store*: A snapshot store persists snapshots of a processor's internal state. Snapshots are used for
+  optimizing recovery times. The storage backend of a snapshot store is pluggable.
+
Use cases
=========

@@ -65,9 +84,10 @@ A processor can be implemented by extending the ``Processor`` trait and implemen

Processors only write messages of type ``Persistent`` to the journal, others are received without being persisted.
When a processor's ``receive`` method is called with a ``Persistent`` message it can safely assume that this message
has been successfully written to the journal. If a journal fails to write a ``Persistent`` message then the processor
-receives a ``PersistenceFailure`` message instead of a ``Persistent`` message. In this case, a processor may want to
-inform the sender about the failure, so that the sender can re-send the message, if needed, under the assumption that
-the journal recovered from a temporary failure.
+is stopped, by default. If an application wants a processor to continue running after persistence failures, it must
+handle ``PersistenceFailure`` messages. In this case, a processor may want to inform the sender about the failure,
+so that the sender can re-send the message, if needed, under the assumption that the journal recovered from a
+temporary failure.

A ``Processor`` itself is an ``Actor`` and can therefore be instantiated with ``actorOf``.
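A minimal sketch of such a processor, handling ``PersistenceFailure`` so that it keeps running after a failed write
(class name and reply strings here are illustrative)::

    import akka.persistence.{ Persistent, PersistenceFailure, Processor }

    class MyProcessor extends Processor {
      def receive = {
        case Persistent(payload, sequenceNr) =>
          // the message has already been written to the journal
          println(s"received ${payload} with sequence number ${sequenceNr}")
        case PersistenceFailure(payload, sequenceNr, cause) =>
          // handling this case keeps the processor running; without it the
          // processor would be stopped by default. Inform the sender so it
          // can decide whether to re-send the message.
          sender ! s"could not journal ${payload}: ${cause.getMessage}"
      }
    }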
@@ -312,6 +332,22 @@ A snapshot store plugin can be activated with the following minimal configuratio The specified plugin ``class`` must have a no-arg constructor. The ``plugin-dispatcher`` is the dispatcher used for the plugin actor. If not specified, it defaults to ``akka.persistence.dispatchers.default-plugin-dispatcher``. +Custom serialization +==================== + +Serialization of snapshots and payloads of ``Persistent`` messages is configurable with Akka's +:ref:`serialization-scala` infrastructure. For example, if an application wants to serialize + +* payloads of type ``MyPayload`` with a custom ``MyPayloadSerializer`` and +* snapshots of type ``MySnapshot`` with a custom ``MySnapshotSerializer`` + +it must add + +.. includecode:: code/docs/persistence/PersistenceSerializerDocSpec.scala#custom-serializer-config + +to the application configuration. If not specified, a default serializer is used, which is the ``JavaSerializer`` +in this example. + Miscellaneous ============= @@ -326,6 +362,5 @@ Upcoming features ================= * Reliable channels -* Custom serialization of messages and snapshots * Extended deletion of messages and snapshots * ... diff --git a/akka-docs/rst/scala/serialization.rst b/akka-docs/rst/scala/serialization.rst index 07af2afc420..c5240e4f145 100644 --- a/akka-docs/rst/scala/serialization.rst +++ b/akka-docs/rst/scala/serialization.rst @@ -155,19 +155,7 @@ There is also a default remote address which is the one used by cluster support Deep serialization of Actors ---------------------------- -The current recommended approach to do deep serialization of internal actor state is to use Event Sourcing, -for more reading on the topic, see these examples: - -`Martin Krasser on EventSourcing Part1 `_ - -`Martin Krasser on EventSourcing Part2 `_ - - -.. note:: - - Built-in API support for persisting Actors will come in a later release, see the roadmap for more info: - - `Akka 2.0 roadmap `_ +The recommended approach to do deep serialization of internal actor state is to use Akka :ref:`persistence`. A Word About Java Serialization =============================== diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java new file mode 100644 index 00000000000..5c0e8ed0c1d --- /dev/null +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java @@ -0,0 +1,39 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + +package akka.persistence.journal.japi; + +import scala.concurrent.Future; + +import akka.japi.Procedure; +import akka.persistence.PersistentImpl; + +interface AsyncReplayPlugin { + //#async-replay-plugin-api + /** + * Plugin Java API. + * + * Asynchronously replays persistent messages. Implementations replay a message + * by calling `replayCallback`. The returned future must be completed when all + * messages (matching the sequence number bounds) have been replayed. The future + * `Long` value must be the highest stored sequence number in the journal for the + * specified processor. The future must be completed with a failure if any of + * the persistent messages could not be replayed. + * + * The `replayCallback` must also be called with messages that have been marked + * as deleted. In this case a replayed message's `deleted` field must be set to + * `true`. 
+ * + * The channel ids of delivery confirmations that are available for a replayed + * message must be contained in that message's `confirms` sequence. + * + * @param processorId processor id. + * @param fromSequenceNr sequence number where replay should start. + * @param toSequenceNr sequence number where replay should end (inclusive). + * @param replayCallback called to replay a single message. Can be called from any + * thread. + */ + Future doReplayAsync(String processorId, long fromSequenceNr, long toSequenceNr, Procedure replayCallback); + //#async-replay-plugin-api +} diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java new file mode 100644 index 00000000000..01ec3c2c742 --- /dev/null +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java @@ -0,0 +1,34 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + +package akka.persistence.journal.japi; + +import scala.concurrent.Future; + +import akka.persistence.PersistentImpl; + +interface AsyncWritePlugin { + //#async-write-plugin-api + /** + * Plugin Java API. + * + * Asynchronously writes a `persistent` message to the journal. + */ + Future doWriteAsync(PersistentImpl persistent); + + /** + * Plugin Java API. + * + * Asynchronously marks a `persistent` message as deleted. + */ + Future doDeleteAsync(PersistentImpl persistent); + + /** + * Plugin Java API. + * + * Asynchronously writes a delivery confirmation to the journal. + */ + Future doConfirmAsync(String processorId, long sequenceNr, String channelId); + //#async-write-plugin-api +} diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java new file mode 100644 index 00000000000..dc022ed6449 --- /dev/null +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java @@ -0,0 +1,32 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + +package akka.persistence.journal.japi; + +import akka.persistence.PersistentImpl; + +interface SyncWritePlugin { + //#sync-write-plugin-api + /** + * Plugin Java API. + * + * Synchronously writes a `persistent` message to the journal. + */ + void doWrite(PersistentImpl persistent) throws Exception; + + /** + * Plugin Java API. + * + * Synchronously marks a `persistent` message as deleted. + */ + void doDelete(PersistentImpl persistent) throws Exception; + + /** + * Plugin Java API. + * + * Synchronously writes a delivery confirmation to the journal. + */ + void doConfirm(String processorId, long sequenceNr, String channelId) throws Exception; + //#sync-write-plugin-api +} diff --git a/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java b/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java new file mode 100644 index 00000000000..254e7fe3d9b --- /dev/null +++ b/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java @@ -0,0 +1,3132 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: MessageFormats.proto + +package akka.persistence.serialization; + +public final class MessageFormats { + private MessageFormats() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface PersistentMessageOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .PersistentPayload payload = 1; + /** + * optional .PersistentPayload payload = 1; + */ + boolean hasPayload(); + /** + * optional .PersistentPayload payload = 1; + */ + akka.persistence.serialization.MessageFormats.PersistentPayload getPayload(); + /** + * optional .PersistentPayload payload = 1; + */ + akka.persistence.serialization.MessageFormats.PersistentPayloadOrBuilder getPayloadOrBuilder(); + + // optional int64 sequenceNr = 2; + /** + * optional int64 sequenceNr = 2; + */ + boolean hasSequenceNr(); + /** + * optional int64 sequenceNr = 2; + */ + long getSequenceNr(); + + // optional string processorId = 3; + /** + * optional string processorId = 3; + */ + boolean hasProcessorId(); + /** + * optional string processorId = 3; + */ + java.lang.String getProcessorId(); + /** + * optional string processorId = 3; + */ + com.google.protobuf.ByteString + getProcessorIdBytes(); + + // optional string channelId = 4; + /** + * optional string channelId = 4; + */ + boolean hasChannelId(); + /** + * optional string channelId = 4; + */ + java.lang.String getChannelId(); + /** + * optional string channelId = 4; + */ + com.google.protobuf.ByteString + getChannelIdBytes(); + + // optional bool deleted = 5; + /** + * optional bool deleted = 5; + */ + boolean hasDeleted(); + /** + * optional bool deleted = 5; + */ + boolean getDeleted(); + + // optional bool resolved = 6; + /** + * optional bool resolved = 6; + */ + boolean hasResolved(); + /** + * optional bool resolved = 6; + */ + boolean getResolved(); + + // repeated string confirms = 8; + /** + * repeated string confirms = 8; + */ + java.util.List + getConfirmsList(); + /** + * repeated string confirms = 8; + */ + int getConfirmsCount(); + /** + * repeated string confirms = 8; + */ + java.lang.String getConfirms(int index); + /** + * repeated string confirms = 8; + */ + com.google.protobuf.ByteString + getConfirmsBytes(int index); + + // optional .ConfirmMessage confirmMessage = 10; + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + boolean hasConfirmMessage(); + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + akka.persistence.serialization.MessageFormats.ConfirmMessage getConfirmMessage(); + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + akka.persistence.serialization.MessageFormats.ConfirmMessageOrBuilder getConfirmMessageOrBuilder(); + + // optional string confirmTarget = 9; + /** + * optional string confirmTarget = 9; + */ + boolean hasConfirmTarget(); + /** + * optional string confirmTarget = 9; + */ + java.lang.String getConfirmTarget(); + /** + * optional string confirmTarget = 9; + */ + com.google.protobuf.ByteString + getConfirmTargetBytes(); + + // optional string sender = 7; + /** + * optional string sender = 7; + */ + boolean hasSender(); + /** + * optional string sender = 7; + */ + java.lang.String getSender(); + /** + * optional string sender = 7; + */ + com.google.protobuf.ByteString + getSenderBytes(); + } + /** + * Protobuf type {@code PersistentMessage} + */ + public static final class PersistentMessage extends + com.google.protobuf.GeneratedMessage + implements PersistentMessageOrBuilder { + // Use PersistentMessage.newBuilder() to 
construct. + private PersistentMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private PersistentMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final PersistentMessage defaultInstance; + public static PersistentMessage getDefaultInstance() { + return defaultInstance; + } + + public PersistentMessage getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PersistentMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + akka.persistence.serialization.MessageFormats.PersistentPayload.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = payload_.toBuilder(); + } + payload_ = input.readMessage(akka.persistence.serialization.MessageFormats.PersistentPayload.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(payload_); + payload_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + sequenceNr_ = input.readInt64(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + processorId_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + channelId_ = input.readBytes(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + deleted_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + resolved_ = input.readBool(); + break; + } + case 58: { + bitField0_ |= 0x00000100; + sender_ = input.readBytes(); + break; + } + case 66: { + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + confirms_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000040; + } + confirms_.add(input.readBytes()); + break; + } + case 74: { + bitField0_ |= 0x00000080; + confirmTarget_ = input.readBytes(); + break; + } + case 82: { + akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder subBuilder = null; + if (((bitField0_ & 0x00000040) == 0x00000040)) { + subBuilder = confirmMessage_.toBuilder(); + } + confirmMessage_ = input.readMessage(akka.persistence.serialization.MessageFormats.ConfirmMessage.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(confirmMessage_); + confirmMessage_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000040; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + confirms_ = new com.google.protobuf.UnmodifiableLazyStringList(confirms_); + } + 
this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.PersistentMessage.class, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PersistentMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PersistentMessage(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional .PersistentPayload payload = 1; + public static final int PAYLOAD_FIELD_NUMBER = 1; + private akka.persistence.serialization.MessageFormats.PersistentPayload payload_; + /** + * optional .PersistentPayload payload = 1; + */ + public boolean hasPayload() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .PersistentPayload payload = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentPayload getPayload() { + return payload_; + } + /** + * optional .PersistentPayload payload = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentPayloadOrBuilder getPayloadOrBuilder() { + return payload_; + } + + // optional int64 sequenceNr = 2; + public static final int SEQUENCENR_FIELD_NUMBER = 2; + private long sequenceNr_; + /** + * optional int64 sequenceNr = 2; + */ + public boolean hasSequenceNr() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int64 sequenceNr = 2; + */ + public long getSequenceNr() { + return sequenceNr_; + } + + // optional string processorId = 3; + public static final int PROCESSORID_FIELD_NUMBER = 3; + private java.lang.Object processorId_; + /** + * optional string processorId = 3; + */ + public boolean hasProcessorId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string processorId = 3; + */ + public java.lang.String getProcessorId() { + java.lang.Object ref = processorId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + processorId_ = s; + } + return s; + } + } + /** + * optional string processorId = 3; + */ + public com.google.protobuf.ByteString + getProcessorIdBytes() { + java.lang.Object ref = processorId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + processorId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string channelId = 4; + public static final int CHANNELID_FIELD_NUMBER = 4; + private java.lang.Object channelId_; + /** + * optional string channelId = 4; + */ + public boolean hasChannelId() { + return ((bitField0_ & 
0x00000008) == 0x00000008); + } + /** + * optional string channelId = 4; + */ + public java.lang.String getChannelId() { + java.lang.Object ref = channelId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + channelId_ = s; + } + return s; + } + } + /** + * optional string channelId = 4; + */ + public com.google.protobuf.ByteString + getChannelIdBytes() { + java.lang.Object ref = channelId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + channelId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bool deleted = 5; + public static final int DELETED_FIELD_NUMBER = 5; + private boolean deleted_; + /** + * optional bool deleted = 5; + */ + public boolean hasDeleted() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional bool deleted = 5; + */ + public boolean getDeleted() { + return deleted_; + } + + // optional bool resolved = 6; + public static final int RESOLVED_FIELD_NUMBER = 6; + private boolean resolved_; + /** + * optional bool resolved = 6; + */ + public boolean hasResolved() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional bool resolved = 6; + */ + public boolean getResolved() { + return resolved_; + } + + // repeated string confirms = 8; + public static final int CONFIRMS_FIELD_NUMBER = 8; + private com.google.protobuf.LazyStringList confirms_; + /** + * repeated string confirms = 8; + */ + public java.util.List + getConfirmsList() { + return confirms_; + } + /** + * repeated string confirms = 8; + */ + public int getConfirmsCount() { + return confirms_.size(); + } + /** + * repeated string confirms = 8; + */ + public java.lang.String getConfirms(int index) { + return confirms_.get(index); + } + /** + * repeated string confirms = 8; + */ + public com.google.protobuf.ByteString + getConfirmsBytes(int index) { + return confirms_.getByteString(index); + } + + // optional .ConfirmMessage confirmMessage = 10; + public static final int CONFIRMMESSAGE_FIELD_NUMBER = 10; + private akka.persistence.serialization.MessageFormats.ConfirmMessage confirmMessage_; + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public boolean hasConfirmMessage() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public akka.persistence.serialization.MessageFormats.ConfirmMessage getConfirmMessage() { + return confirmMessage_; + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public akka.persistence.serialization.MessageFormats.ConfirmMessageOrBuilder getConfirmMessageOrBuilder() { + return confirmMessage_; + } + + // optional string confirmTarget = 9; + public static final int CONFIRMTARGET_FIELD_NUMBER = 9; + private java.lang.Object confirmTarget_; + /** + * optional string confirmTarget = 9; + */ + public boolean hasConfirmTarget() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * optional string confirmTarget = 9; + */ + public java.lang.String getConfirmTarget() { + java.lang.Object ref = confirmTarget_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + 
if (bs.isValidUtf8()) { + confirmTarget_ = s; + } + return s; + } + } + /** + * optional string confirmTarget = 9; + */ + public com.google.protobuf.ByteString + getConfirmTargetBytes() { + java.lang.Object ref = confirmTarget_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + confirmTarget_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string sender = 7; + public static final int SENDER_FIELD_NUMBER = 7; + private java.lang.Object sender_; + /** + * optional string sender = 7; + */ + public boolean hasSender() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional string sender = 7; + */ + public java.lang.String getSender() { + java.lang.Object ref = sender_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + sender_ = s; + } + return s; + } + } + /** + * optional string sender = 7; + */ + public com.google.protobuf.ByteString + getSenderBytes() { + java.lang.Object ref = sender_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + sender_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + payload_ = akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance(); + sequenceNr_ = 0L; + processorId_ = ""; + channelId_ = ""; + deleted_ = false; + resolved_ = false; + confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; + confirmMessage_ = akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); + confirmTarget_ = ""; + sender_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasPayload()) { + if (!getPayload().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, payload_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt64(2, sequenceNr_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getProcessorIdBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getChannelIdBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, deleted_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(6, resolved_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + output.writeBytes(7, getSenderBytes()); + } + for (int i = 0; i < confirms_.size(); i++) { + output.writeBytes(8, confirms_.getByteString(i)); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeBytes(9, getConfirmTargetBytes()); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeMessage(10, confirmMessage_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != 
-1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, payload_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, sequenceNr_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getProcessorIdBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getChannelIdBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, deleted_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(6, resolved_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(7, getSenderBytes()); + } + { + int dataSize = 0; + for (int i = 0; i < confirms_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(confirms_.getByteString(i)); + } + size += dataSize; + size += 1 * getConfirmsList().size(); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(9, getConfirmTargetBytes()); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, confirmMessage_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); 
+ } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.PersistentMessage parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(akka.persistence.serialization.MessageFormats.PersistentMessage prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code PersistentMessage} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.PersistentMessage.class, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder.class); + } + + // Construct using akka.persistence.serialization.MessageFormats.PersistentMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPayloadFieldBuilder(); + getConfirmMessageFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (payloadBuilder_ == null) { + payload_ = akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance(); + } else { + payloadBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + sequenceNr_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + processorId_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + channelId_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + deleted_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + resolved_ = false; + bitField0_ = (bitField0_ & ~0x00000020); + confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000040); + if (confirmMessageBuilder_ == null) { + confirmMessage_ = akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); + } else { + 
confirmMessageBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + confirmTarget_ = ""; + bitField0_ = (bitField0_ & ~0x00000100); + sender_ = ""; + bitField0_ = (bitField0_ & ~0x00000200); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessage_descriptor; + } + + public akka.persistence.serialization.MessageFormats.PersistentMessage getDefaultInstanceForType() { + return akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); + } + + public akka.persistence.serialization.MessageFormats.PersistentMessage build() { + akka.persistence.serialization.MessageFormats.PersistentMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public akka.persistence.serialization.MessageFormats.PersistentMessage buildPartial() { + akka.persistence.serialization.MessageFormats.PersistentMessage result = new akka.persistence.serialization.MessageFormats.PersistentMessage(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (payloadBuilder_ == null) { + result.payload_ = payload_; + } else { + result.payload_ = payloadBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.sequenceNr_ = sequenceNr_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.processorId_ = processorId_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.channelId_ = channelId_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.deleted_ = deleted_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.resolved_ = resolved_; + if (((bitField0_ & 0x00000040) == 0x00000040)) { + confirms_ = new com.google.protobuf.UnmodifiableLazyStringList( + confirms_); + bitField0_ = (bitField0_ & ~0x00000040); + } + result.confirms_ = confirms_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000040; + } + if (confirmMessageBuilder_ == null) { + result.confirmMessage_ = confirmMessage_; + } else { + result.confirmMessage_ = confirmMessageBuilder_.build(); + } + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000080; + } + result.confirmTarget_ = confirmTarget_; + if (((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000100; + } + result.sender_ = sender_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof akka.persistence.serialization.MessageFormats.PersistentMessage) { + return mergeFrom((akka.persistence.serialization.MessageFormats.PersistentMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(akka.persistence.serialization.MessageFormats.PersistentMessage other) { + if (other == akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance()) return this; + if (other.hasPayload()) { + mergePayload(other.getPayload()); + } + if (other.hasSequenceNr()) { + 
setSequenceNr(other.getSequenceNr()); + } + if (other.hasProcessorId()) { + bitField0_ |= 0x00000004; + processorId_ = other.processorId_; + onChanged(); + } + if (other.hasChannelId()) { + bitField0_ |= 0x00000008; + channelId_ = other.channelId_; + onChanged(); + } + if (other.hasDeleted()) { + setDeleted(other.getDeleted()); + } + if (other.hasResolved()) { + setResolved(other.getResolved()); + } + if (!other.confirms_.isEmpty()) { + if (confirms_.isEmpty()) { + confirms_ = other.confirms_; + bitField0_ = (bitField0_ & ~0x00000040); + } else { + ensureConfirmsIsMutable(); + confirms_.addAll(other.confirms_); + } + onChanged(); + } + if (other.hasConfirmMessage()) { + mergeConfirmMessage(other.getConfirmMessage()); + } + if (other.hasConfirmTarget()) { + bitField0_ |= 0x00000100; + confirmTarget_ = other.confirmTarget_; + onChanged(); + } + if (other.hasSender()) { + bitField0_ |= 0x00000200; + sender_ = other.sender_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasPayload()) { + if (!getPayload().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + akka.persistence.serialization.MessageFormats.PersistentMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (akka.persistence.serialization.MessageFormats.PersistentMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional .PersistentPayload payload = 1; + private akka.persistence.serialization.MessageFormats.PersistentPayload payload_ = akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.PersistentPayload, akka.persistence.serialization.MessageFormats.PersistentPayload.Builder, akka.persistence.serialization.MessageFormats.PersistentPayloadOrBuilder> payloadBuilder_; + /** + * optional .PersistentPayload payload = 1; + */ + public boolean hasPayload() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .PersistentPayload payload = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentPayload getPayload() { + if (payloadBuilder_ == null) { + return payload_; + } else { + return payloadBuilder_.getMessage(); + } + } + /** + * optional .PersistentPayload payload = 1; + */ + public Builder setPayload(akka.persistence.serialization.MessageFormats.PersistentPayload value) { + if (payloadBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + payloadBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .PersistentPayload payload = 1; + */ + public Builder setPayload( + akka.persistence.serialization.MessageFormats.PersistentPayload.Builder builderForValue) { + if (payloadBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + payloadBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .PersistentPayload payload = 1; + */ + public 
Builder mergePayload(akka.persistence.serialization.MessageFormats.PersistentPayload value) { + if (payloadBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + payload_ != akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance()) { + payload_ = + akka.persistence.serialization.MessageFormats.PersistentPayload.newBuilder(payload_).mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + payloadBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .PersistentPayload payload = 1; + */ + public Builder clearPayload() { + if (payloadBuilder_ == null) { + payload_ = akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance(); + onChanged(); + } else { + payloadBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * optional .PersistentPayload payload = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentPayload.Builder getPayloadBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getPayloadFieldBuilder().getBuilder(); + } + /** + * optional .PersistentPayload payload = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentPayloadOrBuilder getPayloadOrBuilder() { + if (payloadBuilder_ != null) { + return payloadBuilder_.getMessageOrBuilder(); + } else { + return payload_; + } + } + /** + * optional .PersistentPayload payload = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.PersistentPayload, akka.persistence.serialization.MessageFormats.PersistentPayload.Builder, akka.persistence.serialization.MessageFormats.PersistentPayloadOrBuilder> + getPayloadFieldBuilder() { + if (payloadBuilder_ == null) { + payloadBuilder_ = new com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.PersistentPayload, akka.persistence.serialization.MessageFormats.PersistentPayload.Builder, akka.persistence.serialization.MessageFormats.PersistentPayloadOrBuilder>( + payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + return payloadBuilder_; + } + + // optional int64 sequenceNr = 2; + private long sequenceNr_ ; + /** + * optional int64 sequenceNr = 2; + */ + public boolean hasSequenceNr() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int64 sequenceNr = 2; + */ + public long getSequenceNr() { + return sequenceNr_; + } + /** + * optional int64 sequenceNr = 2; + */ + public Builder setSequenceNr(long value) { + bitField0_ |= 0x00000002; + sequenceNr_ = value; + onChanged(); + return this; + } + /** + * optional int64 sequenceNr = 2; + */ + public Builder clearSequenceNr() { + bitField0_ = (bitField0_ & ~0x00000002); + sequenceNr_ = 0L; + onChanged(); + return this; + } + + // optional string processorId = 3; + private java.lang.Object processorId_ = ""; + /** + * optional string processorId = 3; + */ + public boolean hasProcessorId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string processorId = 3; + */ + public java.lang.String getProcessorId() { + java.lang.Object ref = processorId_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + processorId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string processorId = 3; + */ + public com.google.protobuf.ByteString + getProcessorIdBytes() { + 
java.lang.Object ref = processorId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + processorId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string processorId = 3; + */ + public Builder setProcessorId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + processorId_ = value; + onChanged(); + return this; + } + /** + * optional string processorId = 3; + */ + public Builder clearProcessorId() { + bitField0_ = (bitField0_ & ~0x00000004); + processorId_ = getDefaultInstance().getProcessorId(); + onChanged(); + return this; + } + /** + * optional string processorId = 3; + */ + public Builder setProcessorIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + processorId_ = value; + onChanged(); + return this; + } + + // optional string channelId = 4; + private java.lang.Object channelId_ = ""; + /** + * optional string channelId = 4; + */ + public boolean hasChannelId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string channelId = 4; + */ + public java.lang.String getChannelId() { + java.lang.Object ref = channelId_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + channelId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string channelId = 4; + */ + public com.google.protobuf.ByteString + getChannelIdBytes() { + java.lang.Object ref = channelId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + channelId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string channelId = 4; + */ + public Builder setChannelId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + channelId_ = value; + onChanged(); + return this; + } + /** + * optional string channelId = 4; + */ + public Builder clearChannelId() { + bitField0_ = (bitField0_ & ~0x00000008); + channelId_ = getDefaultInstance().getChannelId(); + onChanged(); + return this; + } + /** + * optional string channelId = 4; + */ + public Builder setChannelIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + channelId_ = value; + onChanged(); + return this; + } + + // optional bool deleted = 5; + private boolean deleted_ ; + /** + * optional bool deleted = 5; + */ + public boolean hasDeleted() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional bool deleted = 5; + */ + public boolean getDeleted() { + return deleted_; + } + /** + * optional bool deleted = 5; + */ + public Builder setDeleted(boolean value) { + bitField0_ |= 0x00000010; + deleted_ = value; + onChanged(); + return this; + } + /** + * optional bool deleted = 5; + */ + public Builder clearDeleted() { + bitField0_ = (bitField0_ & ~0x00000010); + deleted_ = false; + onChanged(); + return this; + } + + // optional bool resolved = 6; + private boolean resolved_ ; + /** + * optional bool resolved = 6; + */ + public boolean hasResolved() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional bool 
resolved = 6; + */ + public boolean getResolved() { + return resolved_; + } + /** + * optional bool resolved = 6; + */ + public Builder setResolved(boolean value) { + bitField0_ |= 0x00000020; + resolved_ = value; + onChanged(); + return this; + } + /** + * optional bool resolved = 6; + */ + public Builder clearResolved() { + bitField0_ = (bitField0_ & ~0x00000020); + resolved_ = false; + onChanged(); + return this; + } + + // repeated string confirms = 8; + private com.google.protobuf.LazyStringList confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureConfirmsIsMutable() { + if (!((bitField0_ & 0x00000040) == 0x00000040)) { + confirms_ = new com.google.protobuf.LazyStringArrayList(confirms_); + bitField0_ |= 0x00000040; + } + } + /** + * repeated string confirms = 8; + */ + public java.util.List + getConfirmsList() { + return java.util.Collections.unmodifiableList(confirms_); + } + /** + * repeated string confirms = 8; + */ + public int getConfirmsCount() { + return confirms_.size(); + } + /** + * repeated string confirms = 8; + */ + public java.lang.String getConfirms(int index) { + return confirms_.get(index); + } + /** + * repeated string confirms = 8; + */ + public com.google.protobuf.ByteString + getConfirmsBytes(int index) { + return confirms_.getByteString(index); + } + /** + * repeated string confirms = 8; + */ + public Builder setConfirms( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureConfirmsIsMutable(); + confirms_.set(index, value); + onChanged(); + return this; + } + /** + * repeated string confirms = 8; + */ + public Builder addConfirms( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureConfirmsIsMutable(); + confirms_.add(value); + onChanged(); + return this; + } + /** + * repeated string confirms = 8; + */ + public Builder addAllConfirms( + java.lang.Iterable values) { + ensureConfirmsIsMutable(); + super.addAll(values, confirms_); + onChanged(); + return this; + } + /** + * repeated string confirms = 8; + */ + public Builder clearConfirms() { + confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + /** + * repeated string confirms = 8; + */ + public Builder addConfirmsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureConfirmsIsMutable(); + confirms_.add(value); + onChanged(); + return this; + } + + // optional .ConfirmMessage confirmMessage = 10; + private akka.persistence.serialization.MessageFormats.ConfirmMessage confirmMessage_ = akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.ConfirmMessage, akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder, akka.persistence.serialization.MessageFormats.ConfirmMessageOrBuilder> confirmMessageBuilder_; + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public boolean hasConfirmMessage() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public akka.persistence.serialization.MessageFormats.ConfirmMessage getConfirmMessage() { + if (confirmMessageBuilder_ == null) { + return confirmMessage_; + } else { + return confirmMessageBuilder_.getMessage(); + } + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + 
public Builder setConfirmMessage(akka.persistence.serialization.MessageFormats.ConfirmMessage value) { + if (confirmMessageBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + confirmMessage_ = value; + onChanged(); + } else { + confirmMessageBuilder_.setMessage(value); + } + bitField0_ |= 0x00000080; + return this; + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public Builder setConfirmMessage( + akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder builderForValue) { + if (confirmMessageBuilder_ == null) { + confirmMessage_ = builderForValue.build(); + onChanged(); + } else { + confirmMessageBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000080; + return this; + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public Builder mergeConfirmMessage(akka.persistence.serialization.MessageFormats.ConfirmMessage value) { + if (confirmMessageBuilder_ == null) { + if (((bitField0_ & 0x00000080) == 0x00000080) && + confirmMessage_ != akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance()) { + confirmMessage_ = + akka.persistence.serialization.MessageFormats.ConfirmMessage.newBuilder(confirmMessage_).mergeFrom(value).buildPartial(); + } else { + confirmMessage_ = value; + } + onChanged(); + } else { + confirmMessageBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000080; + return this; + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public Builder clearConfirmMessage() { + if (confirmMessageBuilder_ == null) { + confirmMessage_ = akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); + onChanged(); + } else { + confirmMessageBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder getConfirmMessageBuilder() { + bitField0_ |= 0x00000080; + onChanged(); + return getConfirmMessageFieldBuilder().getBuilder(); + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + public akka.persistence.serialization.MessageFormats.ConfirmMessageOrBuilder getConfirmMessageOrBuilder() { + if (confirmMessageBuilder_ != null) { + return confirmMessageBuilder_.getMessageOrBuilder(); + } else { + return confirmMessage_; + } + } + /** + * optional .ConfirmMessage confirmMessage = 10; + */ + private com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.ConfirmMessage, akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder, akka.persistence.serialization.MessageFormats.ConfirmMessageOrBuilder> + getConfirmMessageFieldBuilder() { + if (confirmMessageBuilder_ == null) { + confirmMessageBuilder_ = new com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.ConfirmMessage, akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder, akka.persistence.serialization.MessageFormats.ConfirmMessageOrBuilder>( + confirmMessage_, + getParentForChildren(), + isClean()); + confirmMessage_ = null; + } + return confirmMessageBuilder_; + } + + // optional string confirmTarget = 9; + private java.lang.Object confirmTarget_ = ""; + /** + * optional string confirmTarget = 9; + */ + public boolean hasConfirmTarget() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional string confirmTarget = 9; + */ + public java.lang.String getConfirmTarget() { + java.lang.Object ref = confirmTarget_; 
+ if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + confirmTarget_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string confirmTarget = 9; + */ + public com.google.protobuf.ByteString + getConfirmTargetBytes() { + java.lang.Object ref = confirmTarget_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + confirmTarget_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string confirmTarget = 9; + */ + public Builder setConfirmTarget( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000100; + confirmTarget_ = value; + onChanged(); + return this; + } + /** + * optional string confirmTarget = 9; + */ + public Builder clearConfirmTarget() { + bitField0_ = (bitField0_ & ~0x00000100); + confirmTarget_ = getDefaultInstance().getConfirmTarget(); + onChanged(); + return this; + } + /** + * optional string confirmTarget = 9; + */ + public Builder setConfirmTargetBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000100; + confirmTarget_ = value; + onChanged(); + return this; + } + + // optional string sender = 7; + private java.lang.Object sender_ = ""; + /** + * optional string sender = 7; + */ + public boolean hasSender() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + /** + * optional string sender = 7; + */ + public java.lang.String getSender() { + java.lang.Object ref = sender_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + sender_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string sender = 7; + */ + public com.google.protobuf.ByteString + getSenderBytes() { + java.lang.Object ref = sender_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + sender_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string sender = 7; + */ + public Builder setSender( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000200; + sender_ = value; + onChanged(); + return this; + } + /** + * optional string sender = 7; + */ + public Builder clearSender() { + bitField0_ = (bitField0_ & ~0x00000200); + sender_ = getDefaultInstance().getSender(); + onChanged(); + return this; + } + /** + * optional string sender = 7; + */ + public Builder setSenderBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000200; + sender_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:PersistentMessage) + } + + static { + defaultInstance = new PersistentMessage(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:PersistentMessage) + } + + public interface PersistentPayloadOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int32 serializerId = 1; + /** + * required int32 serializerId = 1; + */ + boolean hasSerializerId(); + /** + * required int32 serializerId = 1; + */ + int getSerializerId(); + + // required bytes payload = 2; + /** + 
* required bytes payload = 2; + */ + boolean hasPayload(); + /** + * required bytes payload = 2; + */ + com.google.protobuf.ByteString getPayload(); + + // optional bytes payloadManifest = 3; + /** + * optional bytes payloadManifest = 3; + */ + boolean hasPayloadManifest(); + /** + * optional bytes payloadManifest = 3; + */ + com.google.protobuf.ByteString getPayloadManifest(); + } + /** + * Protobuf type {@code PersistentPayload} + */ + public static final class PersistentPayload extends + com.google.protobuf.GeneratedMessage + implements PersistentPayloadOrBuilder { + // Use PersistentPayload.newBuilder() to construct. + private PersistentPayload(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private PersistentPayload(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final PersistentPayload defaultInstance; + public static PersistentPayload getDefaultInstance() { + return defaultInstance; + } + + public PersistentPayload getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PersistentPayload( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + serializerId_ = input.readInt32(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + payload_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + payloadManifest_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentPayload_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentPayload_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.PersistentPayload.class, akka.persistence.serialization.MessageFormats.PersistentPayload.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PersistentPayload parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PersistentPayload(input, extensionRegistry); 
+ } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required int32 serializerId = 1; + public static final int SERIALIZERID_FIELD_NUMBER = 1; + private int serializerId_; + /** + * required int32 serializerId = 1; + */ + public boolean hasSerializerId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int32 serializerId = 1; + */ + public int getSerializerId() { + return serializerId_; + } + + // required bytes payload = 2; + public static final int PAYLOAD_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString payload_; + /** + * required bytes payload = 2; + */ + public boolean hasPayload() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required bytes payload = 2; + */ + public com.google.protobuf.ByteString getPayload() { + return payload_; + } + + // optional bytes payloadManifest = 3; + public static final int PAYLOADMANIFEST_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString payloadManifest_; + /** + * optional bytes payloadManifest = 3; + */ + public boolean hasPayloadManifest() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bytes payloadManifest = 3; + */ + public com.google.protobuf.ByteString getPayloadManifest() { + return payloadManifest_; + } + + private void initFields() { + serializerId_ = 0; + payload_ = com.google.protobuf.ByteString.EMPTY; + payloadManifest_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasSerializerId()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasPayload()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, serializerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, payload_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, payloadManifest_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, serializerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, payload_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, payloadManifest_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload 
parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.PersistentPayload parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(akka.persistence.serialization.MessageFormats.PersistentPayload prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code PersistentPayload} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements akka.persistence.serialization.MessageFormats.PersistentPayloadOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentPayload_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentPayload_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.PersistentPayload.class, akka.persistence.serialization.MessageFormats.PersistentPayload.Builder.class); + } + + // Construct using 
akka.persistence.serialization.MessageFormats.PersistentPayload.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + serializerId_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + payload_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + payloadManifest_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return akka.persistence.serialization.MessageFormats.internal_static_PersistentPayload_descriptor; + } + + public akka.persistence.serialization.MessageFormats.PersistentPayload getDefaultInstanceForType() { + return akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance(); + } + + public akka.persistence.serialization.MessageFormats.PersistentPayload build() { + akka.persistence.serialization.MessageFormats.PersistentPayload result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public akka.persistence.serialization.MessageFormats.PersistentPayload buildPartial() { + akka.persistence.serialization.MessageFormats.PersistentPayload result = new akka.persistence.serialization.MessageFormats.PersistentPayload(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.serializerId_ = serializerId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.payload_ = payload_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.payloadManifest_ = payloadManifest_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof akka.persistence.serialization.MessageFormats.PersistentPayload) { + return mergeFrom((akka.persistence.serialization.MessageFormats.PersistentPayload)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(akka.persistence.serialization.MessageFormats.PersistentPayload other) { + if (other == akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance()) return this; + if (other.hasSerializerId()) { + setSerializerId(other.getSerializerId()); + } + if (other.hasPayload()) { + setPayload(other.getPayload()); + } + if (other.hasPayloadManifest()) { + setPayloadManifest(other.getPayloadManifest()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasSerializerId()) { + + return false; + } + if (!hasPayload()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + akka.persistence.serialization.MessageFormats.PersistentPayload parsedMessage 
= null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (akka.persistence.serialization.MessageFormats.PersistentPayload) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required int32 serializerId = 1; + private int serializerId_ ; + /** + * required int32 serializerId = 1; + */ + public boolean hasSerializerId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int32 serializerId = 1; + */ + public int getSerializerId() { + return serializerId_; + } + /** + * required int32 serializerId = 1; + */ + public Builder setSerializerId(int value) { + bitField0_ |= 0x00000001; + serializerId_ = value; + onChanged(); + return this; + } + /** + * required int32 serializerId = 1; + */ + public Builder clearSerializerId() { + bitField0_ = (bitField0_ & ~0x00000001); + serializerId_ = 0; + onChanged(); + return this; + } + + // required bytes payload = 2; + private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes payload = 2; + */ + public boolean hasPayload() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required bytes payload = 2; + */ + public com.google.protobuf.ByteString getPayload() { + return payload_; + } + /** + * required bytes payload = 2; + */ + public Builder setPayload(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + payload_ = value; + onChanged(); + return this; + } + /** + * required bytes payload = 2; + */ + public Builder clearPayload() { + bitField0_ = (bitField0_ & ~0x00000002); + payload_ = getDefaultInstance().getPayload(); + onChanged(); + return this; + } + + // optional bytes payloadManifest = 3; + private com.google.protobuf.ByteString payloadManifest_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes payloadManifest = 3; + */ + public boolean hasPayloadManifest() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bytes payloadManifest = 3; + */ + public com.google.protobuf.ByteString getPayloadManifest() { + return payloadManifest_; + } + /** + * optional bytes payloadManifest = 3; + */ + public Builder setPayloadManifest(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + payloadManifest_ = value; + onChanged(); + return this; + } + /** + * optional bytes payloadManifest = 3; + */ + public Builder clearPayloadManifest() { + bitField0_ = (bitField0_ & ~0x00000004); + payloadManifest_ = getDefaultInstance().getPayloadManifest(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:PersistentPayload) + } + + static { + defaultInstance = new PersistentPayload(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:PersistentPayload) + } + + public interface ConfirmMessageOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional string processorId = 1; + /** + * optional string processorId = 1; + */ + boolean hasProcessorId(); + /** + * optional string processorId = 1; + */ + java.lang.String getProcessorId(); + /** + * optional string processorId = 1; + */ + com.google.protobuf.ByteString + getProcessorIdBytes(); + + // optional int64 sequenceNr = 2; + /** + * 
optional int64 sequenceNr = 2; + */ + boolean hasSequenceNr(); + /** + * optional int64 sequenceNr = 2; + */ + long getSequenceNr(); + + // optional string channelId = 3; + /** + * optional string channelId = 3; + */ + boolean hasChannelId(); + /** + * optional string channelId = 3; + */ + java.lang.String getChannelId(); + /** + * optional string channelId = 3; + */ + com.google.protobuf.ByteString + getChannelIdBytes(); + } + /** + * Protobuf type {@code ConfirmMessage} + */ + public static final class ConfirmMessage extends + com.google.protobuf.GeneratedMessage + implements ConfirmMessageOrBuilder { + // Use ConfirmMessage.newBuilder() to construct. + private ConfirmMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ConfirmMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ConfirmMessage defaultInstance; + public static ConfirmMessage getDefaultInstance() { + return defaultInstance; + } + + public ConfirmMessage getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ConfirmMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + processorId_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + sequenceNr_ = input.readInt64(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + channelId_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_ConfirmMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_ConfirmMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.ConfirmMessage.class, akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ConfirmMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ConfirmMessage(input, extensionRegistry); + } + 
}; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional string processorId = 1; + public static final int PROCESSORID_FIELD_NUMBER = 1; + private java.lang.Object processorId_; + /** + * optional string processorId = 1; + */ + public boolean hasProcessorId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string processorId = 1; + */ + public java.lang.String getProcessorId() { + java.lang.Object ref = processorId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + processorId_ = s; + } + return s; + } + } + /** + * optional string processorId = 1; + */ + public com.google.protobuf.ByteString + getProcessorIdBytes() { + java.lang.Object ref = processorId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + processorId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional int64 sequenceNr = 2; + public static final int SEQUENCENR_FIELD_NUMBER = 2; + private long sequenceNr_; + /** + * optional int64 sequenceNr = 2; + */ + public boolean hasSequenceNr() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int64 sequenceNr = 2; + */ + public long getSequenceNr() { + return sequenceNr_; + } + + // optional string channelId = 3; + public static final int CHANNELID_FIELD_NUMBER = 3; + private java.lang.Object channelId_; + /** + * optional string channelId = 3; + */ + public boolean hasChannelId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string channelId = 3; + */ + public java.lang.String getChannelId() { + java.lang.Object ref = channelId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + channelId_ = s; + } + return s; + } + } + /** + * optional string channelId = 3; + */ + public com.google.protobuf.ByteString + getChannelIdBytes() { + java.lang.Object ref = channelId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + channelId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + processorId_ = ""; + sequenceNr_ = 0L; + channelId_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getProcessorIdBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt64(2, sequenceNr_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getChannelIdBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if 
(size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getProcessorIdBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, sequenceNr_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getChannelIdBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.ConfirmMessage parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(akka.persistence.serialization.MessageFormats.ConfirmMessage prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return 
newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code ConfirmMessage} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements akka.persistence.serialization.MessageFormats.ConfirmMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_ConfirmMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_ConfirmMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.ConfirmMessage.class, akka.persistence.serialization.MessageFormats.ConfirmMessage.Builder.class); + } + + // Construct using akka.persistence.serialization.MessageFormats.ConfirmMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + processorId_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + sequenceNr_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + channelId_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return akka.persistence.serialization.MessageFormats.internal_static_ConfirmMessage_descriptor; + } + + public akka.persistence.serialization.MessageFormats.ConfirmMessage getDefaultInstanceForType() { + return akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); + } + + public akka.persistence.serialization.MessageFormats.ConfirmMessage build() { + akka.persistence.serialization.MessageFormats.ConfirmMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public akka.persistence.serialization.MessageFormats.ConfirmMessage buildPartial() { + akka.persistence.serialization.MessageFormats.ConfirmMessage result = new akka.persistence.serialization.MessageFormats.ConfirmMessage(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.processorId_ = processorId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.sequenceNr_ = sequenceNr_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.channelId_ = channelId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof akka.persistence.serialization.MessageFormats.ConfirmMessage) { + return mergeFrom((akka.persistence.serialization.MessageFormats.ConfirmMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } 
+ + public Builder mergeFrom(akka.persistence.serialization.MessageFormats.ConfirmMessage other) { + if (other == akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance()) return this; + if (other.hasProcessorId()) { + bitField0_ |= 0x00000001; + processorId_ = other.processorId_; + onChanged(); + } + if (other.hasSequenceNr()) { + setSequenceNr(other.getSequenceNr()); + } + if (other.hasChannelId()) { + bitField0_ |= 0x00000004; + channelId_ = other.channelId_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + akka.persistence.serialization.MessageFormats.ConfirmMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (akka.persistence.serialization.MessageFormats.ConfirmMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional string processorId = 1; + private java.lang.Object processorId_ = ""; + /** + * optional string processorId = 1; + */ + public boolean hasProcessorId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string processorId = 1; + */ + public java.lang.String getProcessorId() { + java.lang.Object ref = processorId_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + processorId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string processorId = 1; + */ + public com.google.protobuf.ByteString + getProcessorIdBytes() { + java.lang.Object ref = processorId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + processorId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string processorId = 1; + */ + public Builder setProcessorId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + processorId_ = value; + onChanged(); + return this; + } + /** + * optional string processorId = 1; + */ + public Builder clearProcessorId() { + bitField0_ = (bitField0_ & ~0x00000001); + processorId_ = getDefaultInstance().getProcessorId(); + onChanged(); + return this; + } + /** + * optional string processorId = 1; + */ + public Builder setProcessorIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + processorId_ = value; + onChanged(); + return this; + } + + // optional int64 sequenceNr = 2; + private long sequenceNr_ ; + /** + * optional int64 sequenceNr = 2; + */ + public boolean hasSequenceNr() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int64 sequenceNr = 2; + */ + public long getSequenceNr() { + return sequenceNr_; + } + /** + * optional int64 sequenceNr = 2; + */ + public Builder setSequenceNr(long value) { + bitField0_ |= 0x00000002; + sequenceNr_ = value; + onChanged(); + return this; + } + /** + * optional int64 sequenceNr = 2; + */ + public Builder 
clearSequenceNr() { + bitField0_ = (bitField0_ & ~0x00000002); + sequenceNr_ = 0L; + onChanged(); + return this; + } + + // optional string channelId = 3; + private java.lang.Object channelId_ = ""; + /** + * optional string channelId = 3; + */ + public boolean hasChannelId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string channelId = 3; + */ + public java.lang.String getChannelId() { + java.lang.Object ref = channelId_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + channelId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string channelId = 3; + */ + public com.google.protobuf.ByteString + getChannelIdBytes() { + java.lang.Object ref = channelId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + channelId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string channelId = 3; + */ + public Builder setChannelId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + channelId_ = value; + onChanged(); + return this; + } + /** + * optional string channelId = 3; + */ + public Builder clearChannelId() { + bitField0_ = (bitField0_ & ~0x00000004); + channelId_ = getDefaultInstance().getChannelId(); + onChanged(); + return this; + } + /** + * optional string channelId = 3; + */ + public Builder setChannelIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + channelId_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ConfirmMessage) + } + + static { + defaultInstance = new ConfirmMessage(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ConfirmMessage) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PersistentMessage_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PersistentMessage_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PersistentPayload_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PersistentPayload_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ConfirmMessage_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ConfirmMessage_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\024MessageFormats.proto\"\371\001\n\021PersistentMes" + + "sage\022#\n\007payload\030\001 \001(\0132\022.PersistentPayloa" + + "d\022\022\n\nsequenceNr\030\002 \001(\003\022\023\n\013processorId\030\003 \001" + + "(\t\022\021\n\tchannelId\030\004 \001(\t\022\017\n\007deleted\030\005 \001(\010\022\020" + + "\n\010resolved\030\006 \001(\010\022\020\n\010confirms\030\010 \003(\t\022\'\n\016co" + + "nfirmMessage\030\n \001(\0132\017.ConfirmMessage\022\025\n\rc" + + "onfirmTarget\030\t \001(\t\022\016\n\006sender\030\007 \001(\t\"S\n\021Pe" + + 
"rsistentPayload\022\024\n\014serializerId\030\001 \002(\005\022\017\n" + + "\007payload\030\002 \002(\014\022\027\n\017payloadManifest\030\003 \001(\014\"" + + "L\n\016ConfirmMessage\022\023\n\013processorId\030\001 \001(\t\022\022", + "\n\nsequenceNr\030\002 \001(\003\022\021\n\tchannelId\030\003 \001(\tB\"\n" + + "\036akka.persistence.serializationH\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_PersistentMessage_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_PersistentMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_PersistentMessage_descriptor, + new java.lang.String[] { "Payload", "SequenceNr", "ProcessorId", "ChannelId", "Deleted", "Resolved", "Confirms", "ConfirmMessage", "ConfirmTarget", "Sender", }); + internal_static_PersistentPayload_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_PersistentPayload_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_PersistentPayload_descriptor, + new java.lang.String[] { "SerializerId", "Payload", "PayloadManifest", }); + internal_static_ConfirmMessage_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_ConfirmMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ConfirmMessage_descriptor, + new java.lang.String[] { "ProcessorId", "SequenceNr", "ChannelId", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java new file mode 100644 index 00000000000..b7623ab79b3 --- /dev/null +++ b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java @@ -0,0 +1,52 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + +package akka.persistence.snapshot.japi; + +import scala.concurrent.Future; + +import akka.japi.Option; +import akka.persistence.*; + +interface SnapshotStorePlugin { + //#snapshot-store-plugin-api + /** + * Plugin Java API. + * + * Asynchronously loads a snapshot. + * + * @param processorId processor id. + * @param criteria selection criteria for loading. + */ + Future> doLoadAsync(String processorId, SnapshotSelectionCriteria criteria); + + /** + * Plugin Java API. + * + * Asynchronously saves a snapshot. + * + * @param metadata snapshot metadata. + * @param snapshot snapshot. + */ + Future doSaveAsync(SnapshotMetadata metadata, Object snapshot); + + /** + * Plugin Java API. + * + * Called after successful saving of a snapshot. + * + * @param metadata snapshot metadata. + */ + void onSaved(SnapshotMetadata metadata) throws Exception; + + /** + * Plugin Java API. + * + * Deletes the snapshot identified by `metadata`. + * + * @param metadata snapshot metadata. 
+ */ + void doDelete(SnapshotMetadata metadata) throws Exception; + //#snapshot-store-plugin-api +} diff --git a/akka-persistence/src/main/protobuf/MessageFormats.proto b/akka-persistence/src/main/protobuf/MessageFormats.proto new file mode 100644 index 00000000000..eaf389c044a --- /dev/null +++ b/akka-persistence/src/main/protobuf/MessageFormats.proto @@ -0,0 +1,31 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + +option java_package = "akka.persistence.serialization"; +option optimize_for = SPEED; + +message PersistentMessage { + optional PersistentPayload payload = 1; + optional int64 sequenceNr = 2; + optional string processorId = 3; + optional string channelId = 4; + optional bool deleted = 5; + optional bool resolved = 6; + repeated string confirms = 8; + optional ConfirmMessage confirmMessage = 10; + optional string confirmTarget = 9; + optional string sender = 7; +} + +message PersistentPayload { + required int32 serializerId = 1; + required bytes payload = 2; + optional bytes payloadManifest = 3; +} + +message ConfirmMessage { + optional string processorId = 1; + optional int64 sequenceNr = 2; + optional string channelId = 3; +} diff --git a/akka-persistence/src/main/resources/reference.conf b/akka-persistence/src/main/resources/reference.conf index 7599d38e1d1..ed9f7e85767 100644 --- a/akka-persistence/src/main/resources/reference.conf +++ b/akka-persistence/src/main/resources/reference.conf @@ -2,8 +2,27 @@ # Akka Persistence Reference Config File # ########################################## + + akka { + # Protobuf serialization for persistent messages + actor { + + serializers { + + akka-persistence-snapshot = "akka.persistence.serialization.SnapshotSerializer" + akka-persistence-message = "akka.persistence.serialization.MessageSerializer" + } + + serialization-bindings { + + "akka.persistence.serialization.Snapshot" = akka-persistence-snapshot + "akka.persistence.PersistentImpl" = akka-persistence-message + "akka.persistence.Confirm" = akka-persistence-message + } + } + persistence { journal { diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala index 241e444dc80..7685b8ae4e7 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala @@ -14,9 +14,6 @@ import akka.persistence.journal.AsyncWriteJournal * Persistence extension. */ object Persistence extends ExtensionId[Persistence] with ExtensionIdProvider { - class Settings(config: Config) { - } - /** * Java API. */ diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala index 4f17663280d..5aa94d741d0 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala @@ -83,13 +83,13 @@ object Persistent { * * Internal [[Persistent]] message representation. * - * @param resolved `true` by default, `false` for replayed messages. Set to `true` by a channel if this - * message is replayed and its sender reference was resolved. Channels use this field to - * avoid redundant sender reference resolutions. * @param processorId Id of processor that journaled the message. * @param channelId Id of last channel that delivered the message to a destination. * @param sender Serialized sender reference. * @param deleted `true` if this message is marked as deleted. 
+ * @param resolved `true` by default, `false` for replayed messages. Set to `true` by a channel if this + * message is replayed and its sender reference was resolved. Channels use this field to + * avoid redundant sender reference resolutions. * @param confirms Channel ids of delivery confirmations that are available for this message. Only non-empty * for replayed messages. * @param confirmTarget Delivery confirmation target. @@ -102,14 +102,14 @@ object Persistent { case class PersistentImpl( payload: Any, sequenceNr: Long = 0L, - resolved: Boolean = true, - processorId: String = "", - channelId: String = "", - sender: String = "", + processorId: String = PersistentImpl.Undefined, + channelId: String = PersistentImpl.Undefined, deleted: Boolean = false, + resolved: Boolean = true, confirms: Seq[String] = Nil, + confirmMessage: Confirm = null, confirmTarget: ActorRef = null, - confirmMessage: Confirm = null) extends Persistent { + sender: ActorRef = null) extends Persistent { def withPayload(payload: Any): Persistent = copy(payload = payload) @@ -126,21 +126,17 @@ case class PersistentImpl( } object PersistentImpl { - /** - * Java Plugin API. - */ - def create(payload: Any, sequenceNr: Long, resolved: Boolean, processorId: String, channelId: String, sender: String, deleted: Boolean, confirms: Seq[String]): PersistentImpl = - PersistentImpl(payload, sequenceNr, resolved, processorId, channelId, sender, deleted, confirms) + val Undefined = "" /** * Java Plugin API. */ - def create(payload: Any, sequenceNr: Long, resolved: Boolean, processorId: String, channelId: String, sender: String, deleted: Boolean, confirms: Seq[String], confirmTarget: ActorRef, confirmMessage: Confirm): PersistentImpl = - PersistentImpl(payload, sequenceNr, resolved, processorId, channelId, sender, deleted, confirms, confirmTarget, confirmMessage) + def create(payload: Any, sequenceNr: Long, processorId: String, channelId: String, deleted: Boolean, resolved: Boolean, confirms: Seq[String], confirmMessage: Confirm, confirmTarget: ActorRef, sender: ActorRef): PersistentImpl = + PersistentImpl(payload, sequenceNr, processorId, channelId, deleted, resolved, confirms, confirmMessage, confirmTarget, sender) } /** - * Receive by a processor when a journal failed to write a [[Persistent]] message. + * Received by a processor when a journal failed to write a [[Persistent]] message. * * @param payload payload of the persistent message. * @param sequenceNr sequence number of the persistent message. @@ -149,6 +145,8 @@ object PersistentImpl { case class PersistenceFailure(payload: Any, sequenceNr: Long, cause: Throwable) /** + * Internal API. + * * Message to confirm the receipt of a persistent message (sent via a [[Channel]]). */ @SerialVersionUID(1L) diff --git a/akka-persistence/src/main/scala/akka/persistence/Processor.scala b/akka-persistence/src/main/scala/akka/persistence/Processor.scala index 4cf1eda1095..5592b68df48 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Processor.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Processor.scala @@ -4,9 +4,16 @@ package akka.persistence +import akka.AkkaException import akka.actor._ import akka.dispatch._ +/** + * Thrown by a [[Processor]] if a journal failed to replay all requested messages. + */ +@SerialVersionUID(1L) +case class ReplayFailureException(message: String, cause: Throwable) extends AkkaException(message, cause) + /** * An actor that persists (journals) messages of type [[Persistent]]. Messages of other types are not persisted. 
* @@ -119,7 +126,8 @@ trait Processor extends Actor with Stash { unstashAllInternal() } case ReplayFailure(cause) ⇒ { - throw cause + val errorMsg = s"Replay failure by journal (processor id = [${processorId}])" + throw new ReplayFailureException(errorMsg, cause) } case Replayed(p) ⇒ try { processPersistent(receive, p) } catch { case t: Throwable ⇒ { @@ -140,13 +148,22 @@ trait Processor extends Actor with Stash { override def toString: String = "recovery finished" def aroundReceive(receive: Actor.Receive, message: Any) = message match { - case r: Recover ⇒ // ignore - case Replayed(p) ⇒ processPersistent(receive, p) // can occur after unstash from user stash - case WriteSuccess(p) ⇒ processPersistent(receive, p) - case WriteFailure(p, cause) ⇒ process(receive, PersistenceFailure(p.payload, p.sequenceNr, cause)) - case LoopSuccess(m) ⇒ process(receive, m) - case p: PersistentImpl ⇒ journal forward Write(p.copy(processorId = processorId, sequenceNr = nextSequenceNr()), self) - case m ⇒ journal forward Loop(m, self) + case r: Recover ⇒ // ignore + case Replayed(p) ⇒ processPersistent(receive, p) // can occur after unstash from user stash + case WriteSuccess(p) ⇒ processPersistent(receive, p) + case WriteFailure(p, cause) ⇒ { + val notification = PersistenceFailure(p.payload, p.sequenceNr, cause) + if (receive.isDefinedAt(notification)) process(receive, notification) + else { + val errorMsg = "Processor killed after persistence failure " + + s"(processor id = [${processorId}], sequence nr = [${p.sequenceNr}], payload class = [${p.payload.getClass.getName}]). " + + "To avoid killing processors on persistence failure, a processor must handle PersistenceFailure messages." + throw new ActorKilledException(errorMsg) + } + } + case LoopSuccess(m) ⇒ process(receive, m) + case p: PersistentImpl ⇒ journal forward Write(p.copy(processorId = processorId, sequenceNr = nextSequenceNr()), self) + case m ⇒ journal forward Loop(m, self) } } @@ -159,7 +176,7 @@ trait Processor extends Actor with Stash { override def toString: String = "recovery failed" def aroundReceive(receive: Actor.Receive, message: Any) = message match { - case ReplaySuccess(maxSnr) ⇒ { + case ReplaySuccess(_) | ReplayFailure(_) ⇒ { _currentState = prepareRestart mailbox.enqueueFirst(self, _recoveryFailureMessage) } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala index f79bcd6f8c4..0bab9f1b6ba 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala @@ -33,7 +33,8 @@ trait AsyncReplay { * @param processorId processor id. * @param fromSequenceNr sequence number where replay should start. * @param toSequenceNr sequence number where replay should end (inclusive). - * @param replayCallback called to replay a single message. + * @param replayCallback called to replay a single message. Can be called from any + * thread. 
* * @see [[AsyncWriteJournal]] * @see [[SyncWriteJournal]] diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala index aedba268446..cc3b9e07e06 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala @@ -20,7 +20,6 @@ trait AsyncWriteJournal extends Actor with AsyncReplay { import AsyncWriteJournal._ import context.dispatcher - private val extension = Persistence(context.system) private val resequencer = context.actorOf(Props[Resequencer]) private var resequencerCounter = 1L @@ -29,7 +28,7 @@ trait AsyncWriteJournal extends Actor with AsyncReplay { val csdr = sender val cctr = resequencerCounter val psdr = if (sender.isInstanceOf[PromiseActorRef]) context.system.deadLetters else sender - writeAsync(persistent.copy(sender = Serialization.serializedActorPath(psdr), resolved = false, confirmTarget = null, confirmMessage = null)) map { + writeAsync(persistent.copy(sender = psdr, resolved = false, confirmTarget = null, confirmMessage = null)) map { _ ⇒ Desequenced(WriteSuccess(persistent), cctr, processor, csdr) } recover { case e ⇒ Desequenced(WriteFailure(persistent, e), cctr, processor, csdr) @@ -40,7 +39,7 @@ trait AsyncWriteJournal extends Actor with AsyncReplay { // Send replayed messages and replay result to processor directly. No need // to resequence replayed messages relative to written and looped messages. replayAsync(processorId, fromSequenceNr, toSequenceNr) { p ⇒ - if (!p.deleted) processor.tell(Replayed(p), extension.system.provider.resolveActorRef(p.sender)) + if (!p.deleted) processor.tell(Replayed(p), p.sender) } map { maxSnr ⇒ ReplaySuccess(maxSnr) } recover { diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala index 31f3db30ed7..dfa4c7e7d52 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala @@ -23,14 +23,14 @@ trait SyncWriteJournal extends Actor with AsyncReplay { final def receive = { case Write(persistent, processor) ⇒ { val sdr = if (sender.isInstanceOf[PromiseActorRef]) context.system.deadLetters else sender - Try(write(persistent.copy(sender = Serialization.serializedActorPath(sdr), resolved = false, confirmTarget = null, confirmMessage = null))) match { + Try(write(persistent.copy(sender = sdr, resolved = false, confirmTarget = null, confirmMessage = null))) match { case Success(_) ⇒ processor forward WriteSuccess(persistent) case Failure(e) ⇒ processor forward WriteFailure(persistent, e); throw e } } case Replay(fromSequenceNr, toSequenceNr, processorId, processor) ⇒ { replayAsync(processorId, fromSequenceNr, toSequenceNr) { p ⇒ - if (!p.deleted) processor.tell(Replayed(p), extension.system.provider.resolveActorRef(p.sender)) + if (!p.deleted) processor.tell(Replayed(p), p.sender) } map { maxSnr ⇒ ReplaySuccess(maxSnr) } recover { diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala index 948755faac8..6ac8157fc6a 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala +++ 
b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala @@ -13,35 +13,16 @@ import akka.japi.Procedure import akka.persistence.journal.{ AsyncReplay ⇒ SAsyncReplay } import akka.persistence.PersistentImpl -abstract class AsyncReplay extends SAsyncReplay { this: Actor ⇒ +/** + * Java API. + * + * Asynchronous message replay interface. + */ +abstract class AsyncReplay extends SAsyncReplay with AsyncReplayPlugin { this: Actor ⇒ import context.dispatcher final def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentImpl) ⇒ Unit) = doReplayAsync(processorId, fromSequenceNr, toSequenceNr, new Procedure[PersistentImpl] { def apply(p: PersistentImpl) = replayCallback(p) }).map(_.longValue) - - /** - * Plugin Java API. - * - * Asynchronously replays persistent messages. Implementations replay a message - * by calling `replayCallback`. The returned future must be completed when all - * messages (matching the sequence number bounds) have been replayed. The future - * `Long` value must be the highest stored sequence number in the journal for the - * specified processor. The future must be completed with a failure if any of - * the persistent messages could not be replayed. - * - * The `replayCallback` must also be called with messages that have been marked - * as deleted. In this case a replayed message's `deleted` field must be set to - * `true`. - * - * The channel ids of delivery confirmations that are available for a replayed - * message must be contained in that message's `confirms` sequence. - * - * @param processorId processor id. - * @param fromSequenceNr sequence number where replay should start. - * @param toSequenceNr sequence number where replay should end (inclusive). - * @param replayCallback called to replay a single message. - */ - def doReplayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long, replayCallback: Procedure[PersistentImpl]): Future[JLong] } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala index 4a5c2594d99..e841a5622a8 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala @@ -14,7 +14,7 @@ import akka.persistence.PersistentImpl * * Abstract journal, optimized for asynchronous, non-blocking writes. */ -abstract class AsyncWriteJournal extends AsyncReplay with SAsyncWriteJournal { +abstract class AsyncWriteJournal extends AsyncReplay with SAsyncWriteJournal with AsyncWritePlugin { import context.dispatcher final def writeAsync(persistent: PersistentImpl) = @@ -25,25 +25,4 @@ abstract class AsyncWriteJournal extends AsyncReplay with SAsyncWriteJournal { final def confirmAsync(processorId: String, sequenceNr: Long, channelId: String) = doConfirmAsync(processorId, sequenceNr, channelId).map(Unit.unbox) - - /** - * Plugin Java API. - * - * Asynchronously writes a `persistent` message to the journal. - */ - def doWriteAsync(persistent: PersistentImpl): Future[Void] - - /** - * Plugin Java API. - * - * Asynchronously marks a `persistent` message as deleted. - */ - def doDeleteAsync(persistent: PersistentImpl): Future[Void] - - /** - * Plugin Java API. - * - * Asynchronously writes a delivery confirmation to the journal. 
- */ - def doConfirmAsync(processorId: String, sequenceNr: Long, channelId: String): Future[Void] } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala index 559594577bb..992ca8f5d68 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala @@ -12,7 +12,7 @@ import akka.persistence.PersistentImpl * * Abstract journal, optimized for synchronous writes. */ -abstract class SyncWriteJournal extends AsyncReplay with SSyncWriteJournal { +abstract class SyncWriteJournal extends AsyncReplay with SSyncWriteJournal with SyncWritePlugin { final def write(persistent: PersistentImpl) = doWrite(persistent) @@ -21,28 +21,4 @@ abstract class SyncWriteJournal extends AsyncReplay with SSyncWriteJournal { final def confirm(processorId: String, sequenceNr: Long, channelId: String) = doConfirm(processorId, sequenceNr, channelId) - - /** - * Plugin Java API. - * - * Synchronously writes a `persistent` message to the journal. - */ - @throws(classOf[Exception]) - def doWrite(persistent: PersistentImpl): Unit - - /** - * Plugin Java API. - * - * Synchronously marks a `persistent` message as deleted. - */ - @throws(classOf[Exception]) - def doDelete(persistent: PersistentImpl): Unit - - /** - * Plugin Java API. - * - * Synchronously writes a delivery confirmation to the journal. - */ - @throws(classOf[Exception]) - def doConfirm(processorId: String, sequenceNr: Long, channelId: String): Unit } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala index 7a0063eb416..b1c8c145430 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala @@ -32,9 +32,7 @@ private[leveldb] class LeveldbJournal extends SyncWriteJournal with LeveldbIdMap // needed if default processor and channel ids are used // (actor paths, which contain deployment information). 
- // TODO: use protobuf serializer for PersistentImpl - // TODO: use user-defined serializer for payload - val serializer = SerializationExtension(context.system).findSerializerFor("") + val serialization = SerializationExtension(context.system) import Key._ @@ -55,8 +53,8 @@ private[leveldb] class LeveldbJournal extends SyncWriteJournal with LeveldbIdMap def leveldbSnapshot = leveldbReadOptions.snapshot(leveldb.getSnapshot) def leveldbIterator = leveldb.iterator(leveldbSnapshot) - def persistentToBytes(p: PersistentImpl): Array[Byte] = serializer.toBinary(p) - def persistentFromBytes(a: Array[Byte]): PersistentImpl = serializer.fromBinary(a).asInstanceOf[PersistentImpl] + def persistentToBytes(p: PersistentImpl): Array[Byte] = serialization.serialize(p).get + def persistentFromBytes(a: Array[Byte]): PersistentImpl = serialization.deserialize(a, classOf[PersistentImpl]).get private def withBatch[R](body: WriteBatch ⇒ R): R = { val batch = leveldb.createWriteBatch() diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala new file mode 100644 index 00000000000..afcbbe01839 --- /dev/null +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala @@ -0,0 +1,126 @@ +/** + * Copyright (C) 2009-2013 Typesafe Inc. + */ + +package akka.persistence.serialization + +import scala.language.existentials + +import com.google.protobuf._ + +import akka.actor.ExtendedActorSystem +import akka.persistence._ +import akka.persistence.serialization.MessageFormats._ +import akka.serialization._ + +/** + * Protobuf serializer for [[Persistent]] and `Confirm` messages. + */ +class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { + import PersistentImpl.Undefined + + val PersistentClass = classOf[PersistentImpl] + val ConfirmClass = classOf[Confirm] + + def identifier: Int = 7 + def includeManifest: Boolean = true + + /** + * Serializes a [[Persistent]] message. Delegates serialization of the persistent message's + * payload to a matching `akka.serialization.Serializer`. + */ + def toBinary(o: AnyRef): Array[Byte] = o match { + case p: PersistentImpl ⇒ persistentMessageBuilder(p).build().toByteArray + case c: Confirm ⇒ confirmMessageBuilder(c).build().toByteArray + case _ ⇒ throw new IllegalArgumentException(s"Can't serialize object of type ${o.getClass}") + } + + /** + * Deserializes a [[Persistent]] message. Delegates deserialization of the persistent message's + * payload to a matching `akka.serialization.Serializer`. 
+ */ + def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match { + case None ⇒ persistent(PersistentMessage.parseFrom(bytes)) + case Some(c) ⇒ c match { + case PersistentClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) + case ConfirmClass ⇒ confirm(ConfirmMessage.parseFrom(bytes)) + case _ ⇒ throw new IllegalArgumentException(s"Can't deserialize object of type ${c}") + } + } + + // + // toBinary helpers + // + + private def persistentMessageBuilder(persistent: PersistentImpl) = { + val builder = PersistentMessage.newBuilder + + if (persistent.processorId != Undefined) builder.setProcessorId(persistent.processorId) + if (persistent.channelId != Undefined) builder.setChannelId(persistent.channelId) + if (persistent.confirmMessage != null) builder.setConfirmMessage(confirmMessageBuilder(persistent.confirmMessage)) + if (persistent.confirmTarget != null) builder.setConfirmTarget(Serialization.serializedActorPath(persistent.confirmTarget)) + if (persistent.sender != null) builder.setSender(Serialization.serializedActorPath(persistent.sender)) + + persistent.confirms.foreach(builder.addConfirms) + + builder.setPayload(persistentPayloadBuilder(persistent.payload.asInstanceOf[AnyRef])) + builder.setSequenceNr(persistent.sequenceNr) + builder.setDeleted(persistent.deleted) + builder.setResolved(persistent.resolved) + builder + } + + private def persistentPayloadBuilder(payload: AnyRef) = { + val serializer = SerializationExtension(system).findSerializerFor(payload) + val builder = PersistentPayload.newBuilder() + + if (serializer.includeManifest) builder.setPayloadManifest((ByteString.copyFromUtf8(payload.getClass.getName))) + + builder.setPayload(ByteString.copyFrom(serializer.toBinary(payload))) + builder.setSerializerId(serializer.identifier) + builder + } + + private def confirmMessageBuilder(confirm: Confirm) = { + ConfirmMessage.newBuilder + .setProcessorId(confirm.processorId) + .setSequenceNr(confirm.sequenceNr) + .setChannelId(confirm.channelId) + } + + // + // fromBinary helpers + // + + private def persistent(persistentMessage: PersistentMessage): PersistentImpl = { + import scala.collection.JavaConverters._ + PersistentImpl( + payload(persistentMessage.getPayload), + persistentMessage.getSequenceNr, + if (persistentMessage.hasProcessorId) persistentMessage.getProcessorId else Undefined, + if (persistentMessage.hasChannelId) persistentMessage.getChannelId else Undefined, + persistentMessage.getDeleted, + persistentMessage.getResolved, + persistentMessage.getConfirmsList.asScala.toList, + if (persistentMessage.hasConfirmMessage) confirm(persistentMessage.getConfirmMessage) else null, + if (persistentMessage.hasConfirmTarget) system.provider.resolveActorRef(persistentMessage.getConfirmTarget) else null, + if (persistentMessage.hasSender) system.provider.resolveActorRef(persistentMessage.getSender) else null) + } + + private def payload(persistentPayload: PersistentPayload): Any = { + val payloadClass = if (persistentPayload.hasPayloadManifest) + Some(system.dynamicAccess.getClassFor[AnyRef](persistentPayload.getPayloadManifest.toStringUtf8).get) else None + + SerializationExtension(system).deserialize( + persistentPayload.getPayload.toByteArray, + persistentPayload.getSerializerId, + payloadClass).get + } + + private def confirm(confirmMessage: ConfirmMessage): Confirm = { + Confirm( + confirmMessage.getProcessorId, + confirmMessage.getSequenceNr, + confirmMessage.getChannelId) + } +} diff --git 
a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala new file mode 100644 index 00000000000..066c3e16d64 --- /dev/null +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala @@ -0,0 +1,87 @@ +/** + * Copyright (C) 2012-2013 Eligotech BV. + */ + +package akka.persistence.serialization + +import java.io._ + +import akka.actor._ +import akka.serialization.{ Serializer, SerializationExtension } + +/** + * Wrapper for snapshot `data`. Snapshot `data` are the actual snapshot objects captured by + * a [[Processor]]. + * + * @see [[SnapshotSerializer]] + */ +@SerialVersionUID(1L) +case class Snapshot(data: Any) + +/** + * INTERNAL API. + */ +@SerialVersionUID(1L) +private[serialization] case class SnapshotHeader(serializerId: Int, manifest: Option[String]) + +/** + * [[Snapshot]] serializer. + */ +class SnapshotSerializer(system: ExtendedActorSystem) extends Serializer { + def identifier: Int = 8 + def includeManifest: Boolean = false + + /** + * Serializes a [[Snapshot]]. Delegates serialization of snapshot `data` to a matching + * `akka.serialization.Serializer`. + */ + def toBinary(o: AnyRef): Array[Byte] = o match { + case Snapshot(data) ⇒ snapshotToBinary(data.asInstanceOf[AnyRef]) + case _ ⇒ throw new IllegalArgumentException(s"Can't serialize object of type ${o.getClass}") + } + + /** + * Deserializes a [[Snapshot]]. Delegates deserialization of snapshot `data` to a matching + * `akka.serialization.Serializer`. + */ + def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = + Snapshot(snapshotFromBinary(bytes)) + + private def snapshotToBinary(snapshot: AnyRef): Array[Byte] = { + val extension = SerializationExtension(system) + + val snapshotSerializer = extension.findSerializerFor(snapshot) + val snapshotManifest = if (snapshotSerializer.includeManifest) Some(snapshot.getClass.getName) else None + + val header = SnapshotHeader(snapshotSerializer.identifier, snapshotManifest) + val headerSerializer = extension.findSerializerFor(header) + val headerBytes = headerSerializer.toBinary(header) + + val out = new ByteArrayOutputStream + + writeInt(out, headerBytes.length) + + out.write(headerBytes) + out.write(snapshotSerializer.toBinary(snapshot)) + out.toByteArray + } + + private def snapshotFromBinary(bytes: Array[Byte]): AnyRef = { + val extension = SerializationExtension(system) + + val headerLength = readInt(new ByteArrayInputStream(bytes)) + val headerBytes = bytes.slice(4, headerLength + 4) + val snapshotBytes = bytes.drop(headerLength + 4) + + val header = extension.deserialize(headerBytes, classOf[SnapshotHeader]).get + val manifest = header.manifest.map(system.dynamicAccess.getClassFor[AnyRef](_).get) + + extension.deserialize[AnyRef](snapshotBytes, header.serializerId, manifest).get + } + + private def writeInt(outputStream: OutputStream, i: Int) = + 0 to 24 by 8 foreach { shift ⇒ outputStream.write(i >> shift) } + + private def readInt(inputStream: InputStream) = + (0 to 24 by 8).foldLeft(0) { (id, shift) ⇒ (id | (inputStream.read() << shift)) } +} diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala new file mode 100644 index 00000000000..59776b30264 --- /dev/null +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala @@ -0,0 +1,26 @@ +/** + * Copyright (C)
2009-2013 Typesafe Inc. + */ + +package akka.persistence + +import java.io.{ ByteArrayOutputStream, InputStream } + +package object serialization { + /** + * Converts an input stream to a byte array. + */ + def streamToBytes(inputStream: InputStream): Array[Byte] = { + val len = 16384 + val buf = Array.ofDim[Byte](len) + val out = new ByteArrayOutputStream + + @scala.annotation.tailrec + def copy(): Array[Byte] = { + val n = inputStream.read(buf, 0, len) + if (n != -1) { out.write(buf, 0, n); copy() } else out.toByteArray + } + + copy() + } +} diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotSerialization.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotSerialization.scala deleted file mode 100644 index 7e8968de4f2..00000000000 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotSerialization.scala +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Copyright (C) 2012-2013 Eligotech BV. - */ - -package akka.persistence.snapshot - -import java.io._ - -import akka.actor._ -import akka.persistence.SnapshotMetadata -import akka.util.ClassLoaderObjectInputStream - -/** - * Snapshot serialization extension. - */ -private[persistence] object SnapshotSerialization extends ExtensionId[SnapshotSerialization] with ExtensionIdProvider { - def createExtension(system: ExtendedActorSystem): SnapshotSerialization = new SnapshotSerialization(system) - def lookup() = SnapshotSerialization -} - -/** - * Snapshot serialization extension. - */ -private[persistence] class SnapshotSerialization(val system: ExtendedActorSystem) extends Extension { - import akka.serialization.JavaSerializer - - /** - * Java serialization based snapshot serializer. - */ - val java = new SnapshotSerializer { - def serialize(stream: OutputStream, metadata: SnapshotMetadata, state: Any) = { - val out = new ObjectOutputStream(stream) - JavaSerializer.currentSystem.withValue(system) { out.writeObject(state) } - } - - def deserialize(stream: InputStream, metadata: SnapshotMetadata) = { - val in = new ClassLoaderObjectInputStream(system.dynamicAccess.classLoader, stream) - JavaSerializer.currentSystem.withValue(system) { in.readObject } - } - } -} - -/** - * Stream-based snapshot serializer. - */ -private[persistence] trait SnapshotSerializer { - /** - * Serializes a `snapshot` to an output stream. - */ - def serialize(stream: OutputStream, metadata: SnapshotMetadata, snapshot: Any): Unit - - /** - * Deserializes a snapshot from an input stream. 
- */ - def deserialize(stream: InputStream, metadata: SnapshotMetadata): Any -} - diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala index addc796087a..3b54ed24121 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala @@ -5,7 +5,6 @@ package akka.persistence.snapshot import scala.concurrent.Future -import scala.util._ import akka.actor._ import akka.pattern.pipe diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala index d4594c3a503..0c095f04cc2 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala @@ -10,7 +10,12 @@ import akka.japi.{ Option ⇒ JOption } import akka.persistence._ import akka.persistence.snapshot.{ SnapshotStore ⇒ SSnapshotStore } -abstract class SnapshotStore extends SSnapshotStore { +/** + * Java API. + * + * Abstract snapshot store. + */ +abstract class SnapshotStore extends SSnapshotStore with SnapshotStorePlugin { import context.dispatcher final def loadAsync(processorId: String, criteria: SnapshotSelectionCriteria) = @@ -24,44 +29,4 @@ abstract class SnapshotStore extends SSnapshotStore { final def delete(metadata: SnapshotMetadata) = doDelete(metadata) - - /** - * Plugin Java API. - * - * Asynchronously loads a snapshot. - * - * @param processorId processor id. - * @param criteria selection criteria for loading. - */ - def doLoadAsync(processorId: String, criteria: SnapshotSelectionCriteria): Future[JOption[SelectedSnapshot]] - - /** - * Plugin Java API. - * - * Asynchronously saves a snapshot. - * - * @param metadata snapshot metadata. - * @param snapshot snapshot. - */ - def doSaveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Void] - - /** - * Plugin Java API. - * - * Called after successful saving of a snapshot. - * - * @param metadata snapshot metadata. - */ - @throws(classOf[Exception]) - def onSaved(metadata: SnapshotMetadata): Unit - - /** - * Plugin Java API. - * - * Deletes the snapshot identified by `metadata`. - * - * @param metadata snapshot metadata. - */ - @throws(classOf[Exception]) - def doDelete(metadata: SnapshotMetadata): Unit } diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala index 79277ba5169..53a8d13f67c 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala @@ -14,6 +14,8 @@ import scala.util._ import akka.actor.ActorLogging import akka.persistence._ import akka.persistence.snapshot._ +import akka.persistence.serialization._ +import akka.serialization.SerializationExtension /** * INTERNAL API. 
@@ -27,8 +29,7 @@ private[persistence] class LocalSnapshotStore extends SnapshotStore with ActorLo private val streamDispatcher = context.system.dispatchers.lookup(config.getString("stream-dispatcher")) private val snapshotDir = new File(config.getString("dir")) - // TODO: make snapshot serializer configurable - private val snapshotSerializer = SnapshotSerialization(context.system).java + private val serializationExtension = SerializationExtension(context.system) private var snapshotMetadata = Map.empty[String, SortedSet[SnapshotMetadata]] def loadAsync(processorId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = @@ -57,8 +58,8 @@ private[persistence] class LocalSnapshotStore extends SnapshotStore with ActorLo def load(metadata: SortedSet[SnapshotMetadata]): Option[SelectedSnapshot] = metadata.lastOption match { case None ⇒ None case Some(md) ⇒ { - Try(withInputStream(md)(snapshotSerializer.deserialize(_, md))) match { - case Success(s) ⇒ Some(SelectedSnapshot(md, s)) + Try(withInputStream(md)(deserialize)) match { + case Success(s) ⇒ Some(SelectedSnapshot(md, s.data)) case Failure(e) ⇒ { log.error(e, s"error loading snapshot ${md}") load(metadata.init) // try older snapshot @@ -84,12 +85,18 @@ private[persistence] class LocalSnapshotStore extends SnapshotStore with ActorLo } private def save(metadata: SnapshotMetadata, snapshot: Any): Unit = - withOutputStream(metadata)(snapshotSerializer.serialize(_, metadata, snapshot)) + withOutputStream(metadata)(serialize(_, Snapshot(snapshot))) - private def withOutputStream(metadata: SnapshotMetadata)(p: (OutputStream) ⇒ Unit) = + protected def deserialize(inputStream: InputStream): Snapshot = + serializationExtension.deserialize(streamToBytes(inputStream), classOf[Snapshot]).get + + protected def serialize(outputStream: OutputStream, snapshot: Snapshot): Unit = + outputStream.write(serializationExtension.findSerializerFor(snapshot).toBinary(snapshot)) + + private def withOutputStream(metadata: SnapshotMetadata)(p: (OutputStream) ⇒ Unit): Unit = withStream(new BufferedOutputStream(new FileOutputStream(snapshotFile(metadata))), p) - private def withInputStream(metadata: SnapshotMetadata)(p: (InputStream) ⇒ Any) = + private def withInputStream[T](metadata: SnapshotMetadata)(p: (InputStream) ⇒ T): T = withStream(new BufferedInputStream(new FileInputStream(snapshotFile(metadata))), p) private def withStream[A <: Closeable, B](stream: A, p: A ⇒ B): B = diff --git a/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala new file mode 100644 index 00000000000..dce19ea3703 --- /dev/null +++ b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala @@ -0,0 +1,196 @@ +package akka.persistence.serialization + +import com.typesafe.config._ + +import akka.actor._ +import akka.persistence._ +import akka.serialization._ +import akka.testkit._ + +object SerializerSpecConfigs { + val common = + """ + serialize-creators = on + serialize-messages = on + """ + + val customSerializers = + """ + akka.actor { + serializers { + my-payload = "akka.persistence.serialization.MyPayloadSerializer" + my-snapshot = "akka.persistence.serialization.MySnapshotSerializer" + } + serialization-bindings { + "akka.persistence.serialization.MyPayload" = my-payload + "akka.persistence.serialization.MySnapshot" = my-snapshot + } + } + """ + + val remoteCommon = + """ + akka { + actor { + provider = 
"akka.remote.RemoteActorRefProvider" + } + remote { + enabled-transports = ["akka.remote.netty.tcp"] + netty.tcp.hostname = "127.0.0.1" + } + loglevel = ERROR + log-dead-letters = 0 + log-dead-letters-during-shutdown = off + } + """ + + val systemA = "akka.remote.netty.tcp.port = 0" + val systemB = "akka.remote.netty.tcp.port = 0" + + def config(configs: String*): Config = + configs.foldLeft(ConfigFactory.parseString(common))((r, c) ⇒ r.withFallback(ConfigFactory.parseString(c))) +} + +import SerializerSpecConfigs._ + +class SnapshotSerializerPersistenceSpec extends AkkaSpec(config(customSerializers)) { + val serialization = SerializationExtension(system) + + "A snapshot serializer" must { + "handle custom snapshot Serialization" in { + val wrapped = Snapshot(MySnapshot("a")) + val serializer = serialization.findSerializerFor(wrapped) + + val bytes = serializer.toBinary(wrapped) + val deserialized = serializer.fromBinary(bytes, None) + + deserialized must be(Snapshot(MySnapshot(".a."))) + } + } +} + +class MessageSerializerPersistenceSpec extends AkkaSpec(config(customSerializers)) { + val serialization = SerializationExtension(system) + + "A message serializer" when { + "not given a manifest" must { + "handle custom persistent message serialization" in { + val persistent = PersistentImpl(MyPayload("a"), 13, "p1", "c1", true, true, Seq("c1", "c2"), Confirm("p2", 14, "c2"), testActor, testActor) + val serializer = serialization.findSerializerFor(persistent) + + val bytes = serializer.toBinary(persistent) + val deserialized = serializer.fromBinary(bytes, None) + + deserialized must be(persistent.withPayload(MyPayload(".a."))) + } + } + "given a persistent message manifest" must { + "handle custom persistent message serialization" in { + val persistent = PersistentImpl(MyPayload("b"), 13, "p1", "c1", true, true, Seq("c1", "c2"), Confirm("p2", 14, "c2"), testActor, testActor) + val serializer = serialization.findSerializerFor(persistent) + + val bytes = serializer.toBinary(persistent) + val deserialized = serializer.fromBinary(bytes, Some(classOf[PersistentImpl])) + + deserialized must be(persistent.withPayload(MyPayload(".b."))) + } + } + "given a confirmation message manifest" must { + "handle confirmation message serialization" in { + val confirmation = Confirm("x", 2, "y") + val serializer = serialization.findSerializerFor(confirmation) + + val bytes = serializer.toBinary(confirmation) + val deserialized = serializer.fromBinary(bytes, Some(classOf[Confirm])) + + deserialized must be(confirmation) + } + } + } +} + +object MessageSerializerRemotingSpec { + class LocalActor(port: Int) extends Actor { + def receive = { + case m ⇒ context.actorSelection(s"akka.tcp://remote@127.0.0.1:${port}/user/remote") tell (m, sender) + } + } + + class RemoteActor extends Actor { + def receive = { + case Persistent(MyPayload(data), _) ⇒ sender ! data + case Confirm(pid, snr, cid) ⇒ sender ! 
s"${pid},${snr},${cid}" + } + } + + def port(system: ActorSystem, protocol: String) = + addr(system, protocol).port.get + + def addr(system: ActorSystem, protocol: String) = + system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress +} + +class MessageSerializerRemotingSpec extends AkkaSpec(config(systemA).withFallback(config(customSerializers, remoteCommon))) with ImplicitSender { + import MessageSerializerRemotingSpec._ + + val remoteSystem = ActorSystem("remote", config(systemB).withFallback(config(customSerializers, remoteCommon))) + val localActor = system.actorOf(Props(classOf[LocalActor], port(remoteSystem, "tcp"))) + + override protected def atStartup() { + remoteSystem.actorOf(Props[RemoteActor], "remote") + } + + override def afterTermination() { + remoteSystem.shutdown() + remoteSystem.awaitTermination() + } + + "A message serializer" must { + "custom-serialize persistent messages during remoting" in { + localActor ! Persistent(MyPayload("a")) + expectMsg(".a.") + } + "serialize confirmation messages during remoting" in { + localActor ! Confirm("a", 2, "b") + expectMsg("a,2,b") + + } + } +} + +case class MyPayload(data: String) +case class MySnapshot(data: String) + +class MyPayloadSerializer extends Serializer { + val MyPayloadClass = classOf[MyPayload] + + def identifier: Int = 77123 + def includeManifest: Boolean = true + + def toBinary(o: AnyRef): Array[Byte] = o match { + case MyPayload(data) ⇒ s".${data}".getBytes("UTF-8") + } + + def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match { + case Some(MyPayloadClass) ⇒ MyPayload(s"${new String(bytes, "UTF-8")}.") + case Some(c) ⇒ throw new Exception(s"unexpected manifest ${c}") + case None ⇒ throw new Exception("no manifest") + } +} + +class MySnapshotSerializer extends Serializer { + val MySnapshotClass = classOf[MySnapshot] + + def identifier: Int = 77124 + def includeManifest: Boolean = true + + def toBinary(o: AnyRef): Array[Byte] = o match { + case MySnapshot(data) ⇒ s".${data}".getBytes("UTF-8") + } + + def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match { + case Some(MySnapshotClass) ⇒ MySnapshot(s"${new String(bytes, "UTF-8")}.") + case Some(c) ⇒ throw new Exception(s"unexpected manifest ${c}") + case None ⇒ throw new Exception("no manifest") + } +} \ No newline at end of file diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index ea3bbdf1f0b..e80eebfa35f 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -275,7 +275,7 @@ object AkkaBuild extends Build { lazy val persistence = Project( id = "akka-persistence-experimental", base = file("akka-persistence"), - dependencies = Seq(actor, testkit % "test->test"), + dependencies = Seq(actor, remote % "test->test", testkit % "test->test"), settings = defaultSettings ++ scaladocSettings ++ experimentalSettings ++ javadocSettings ++ OSGi.persistence ++ Seq( libraryDependencies ++= Dependencies.persistence, previousArtifact := akkaPreviousArtifact("akka-persistence") @@ -994,7 +994,7 @@ object AkkaBuild extends Build { val transactor = exports(Seq("akka.transactor.*")) - val persistence = exports(Seq("akka.persistence.*")) + val persistence = exports(Seq("akka.persistence.*"), imports = Seq(protobufImport())) val testkit = exports(Seq("akka.testkit.*")) @@ -1104,7 +1104,7 @@ object Dependencies { val transactor = Seq(scalaStm, Test.scalatest, Test.junit) - val persistence = Seq(levelDB, Test.scalatest, Test.junit, Test.commonsIo) + val persistence = Seq(levelDB, 
protobuf, Test.scalatest, Test.junit, Test.commonsIo) val mailboxes = Seq(Test.scalatest, Test.junit)
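
As a minimal sketch of how an application plugs into the serialization wiring introduced above: the reference.conf additions bind PersistentImpl, Confirm and Snapshot to the new Protobuf serializers, and payloads are delegated to whatever serializer the user binds, exactly as MyPayloadSerializer is bound in the SerializerSpec. The event class, serializer id, package and config keys below are illustrative assumptions, not part of this change.

package docs.persistence.example

import akka.actor.ActorSystem
import akka.serialization.{ SerializationExtension, Serializer }
import com.typesafe.config.ConfigFactory

// Hypothetical application event; any payload class can be bound the same way.
case class OrderPlaced(orderId: String)

// Minimal payload serializer, analogous to MyPayloadSerializer in the spec above.
class OrderPlacedSerializer extends Serializer {
  def identifier: Int = 41001           // assumed unique within the application
  def includeManifest: Boolean = false

  def toBinary(o: AnyRef): Array[Byte] = o match {
    case OrderPlaced(id) ⇒ id.getBytes("UTF-8")
  }

  def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef =
    OrderPlaced(new String(bytes, "UTF-8"))
}

object CustomSerializationExample extends App {
  // Bind the payload class to the custom serializer; PersistentImpl, Confirm and
  // Snapshot are already covered by the bindings added to reference.conf above.
  val config = ConfigFactory.parseString("""
    akka.actor.serializers.order-placed = "docs.persistence.example.OrderPlacedSerializer"
    akka.actor.serialization-bindings {
      "docs.persistence.example.OrderPlaced" = order-placed
    }
  """).withFallback(ConfigFactory.load())

  val system = ActorSystem("example", config)
  val serialization = SerializationExtension(system)

  // Round-trip the payload through the bound serializer.
  val bytes = serialization.serialize(OrderPlaced("order-42")).get
  println(serialization.deserialize(bytes, classOf[OrderPlaced]).get) // OrderPlaced(order-42)

  system.shutdown()
}

The same serialization-bindings mechanism is what the akka-persistence-message and akka-persistence-snapshot entries in reference.conf rely on, so a journal or snapshot store written against this change only needs to call SerializationExtension, as LeveldbJournal and LocalSnapshotStore now do.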