diff --git a/Cargo.lock b/Cargo.lock
index 745fe12..507ed25 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4523,8 +4523,8 @@ dependencies = [
[[package]]
name = "torii-client"
-version = "1.8.2"
-source = "git+https://github.com/dojoengine/torii?rev=8378c63#8378c636a6904f17ce04a011d2aee1bc3d84751a"
+version = "1.8.7"
+source = "git+https://github.com/dojoengine/torii?rev=ffd7139#ffd71390e0a71c06892a9681d155515f36efa988"
dependencies = [
"async-trait",
"crypto-bigint",
@@ -4547,8 +4547,8 @@ dependencies = [
[[package]]
name = "torii-grpc-client"
-version = "1.8.2"
-source = "git+https://github.com/dojoengine/torii?rev=8378c63#8378c636a6904f17ce04a011d2aee1bc3d84751a"
+version = "1.8.7"
+source = "git+https://github.com/dojoengine/torii?rev=ffd7139#ffd71390e0a71c06892a9681d155515f36efa988"
dependencies = [
"crypto-bigint",
"dojo-types",
@@ -4573,8 +4573,8 @@ dependencies = [
[[package]]
name = "torii-proto"
-version = "1.8.2"
-source = "git+https://github.com/dojoengine/torii?rev=8378c63#8378c636a6904f17ce04a011d2aee1bc3d84751a"
+version = "1.8.7"
+source = "git+https://github.com/dojoengine/torii?rev=ffd7139#ffd71390e0a71c06892a9681d155515f36efa988"
dependencies = [
"chrono",
"crypto-bigint",
diff --git a/Cargo.toml b/Cargo.toml
index c4d5ce3..1377b32 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -15,9 +15,9 @@ version = "1.8.3"
# Dojo dependencies
dojo-world = { git = "https://github.com/dojoengine/dojo", rev = "0afeb1bc" }
dojo-types = { git = "https://github.com/dojoengine/dojo", rev = "0afeb1bc" }
-torii-proto = { git = "https://github.com/dojoengine/torii", rev = "8378c63" }
-torii-client = { git = "https://github.com/dojoengine/torii", rev = "8378c63" }
-torii-grpc-client = { git = "https://github.com/dojoengine/torii", rev = "8378c63" }
+torii-proto = { git = "https://github.com/dojoengine/torii", rev = "ffd7139" }
+torii-client = { git = "https://github.com/dojoengine/torii", rev = "ffd7139" }
+torii-grpc-client = { git = "https://github.com/dojoengine/torii", rev = "ffd7139" }
# Starknet dependencies
starknet = "0.17.0"
diff --git a/bindings/c/dojo.h b/bindings/c/dojo.h
index ecb85d3..83d8d9e 100644
--- a/bindings/c/dojo.h
+++ b/bindings/c/dojo.h
@@ -29,6 +29,7 @@ struct TokenBalance;
struct TokenContract;
struct Contract;
struct TokenTransfer;
+struct TableSearchResults;
struct Provider;
struct Account;
struct Ty;
@@ -36,8 +37,10 @@ struct Model;
struct Member;
struct AchievementTask;
struct PlayerAchievementProgress;
+struct SearchMatch;
struct EnumOption;
struct TaskProgress;
+struct SearchMatchField;
typedef enum BlockTag {
Latest,
@@ -1052,6 +1055,38 @@ typedef struct TokenTransfer {
struct COptionc_char event_id;
} TokenTransfer;
+typedef struct CArrayTableSearchResults {
+ struct TableSearchResults *data;
+ uintptr_t data_len;
+} CArrayTableSearchResults;
+
+typedef struct SearchResponse {
+ uint32_t total;
+ struct CArrayTableSearchResults results;
+} SearchResponse;
+
+typedef enum ResultSearchResponse_Tag {
+ OkSearchResponse,
+ ErrSearchResponse,
+} ResultSearchResponse_Tag;
+
+typedef struct ResultSearchResponse {
+ ResultSearchResponse_Tag tag;
+ union {
+ struct {
+ struct SearchResponse ok;
+ };
+ struct {
+ struct Error err;
+ };
+ };
+} ResultSearchResponse;
+
+typedef struct SearchQuery {
+ const char *query;
+ uint32_t limit;
+} SearchQuery;
+
typedef enum ResultCArrayFieldElement_Tag {
OkCArrayFieldElement,
ErrCArrayFieldElement,
@@ -1285,6 +1320,17 @@ typedef struct TokenContract {
struct COptionU256 total_supply;
} TokenContract;
+typedef struct CArraySearchMatch {
+ struct SearchMatch *data;
+ uintptr_t data_len;
+} CArraySearchMatch;
+
+typedef struct TableSearchResults {
+ const char *table;
+ uint32_t count;
+ struct CArraySearchMatch matches;
+} TableSearchResults;
+
typedef struct CArrayEnumOption {
struct EnumOption *data;
uintptr_t data_len;
@@ -1384,6 +1430,31 @@ typedef struct PlayerAchievementProgress {
double progress_percentage;
} PlayerAchievementProgress;
+typedef struct CArraySearchMatchField {
+ struct SearchMatchField *data;
+ uintptr_t data_len;
+} CArraySearchMatchField;
+
+typedef enum COptionf64_Tag {
+ Somef64,
+ Nonef64,
+} COptionf64_Tag;
+
+typedef struct COptionf64 {
+ COptionf64_Tag tag;
+ union {
+ struct {
+ double some;
+ };
+ };
+} COptionf64;
+
+typedef struct SearchMatch {
+ const char *id;
+ struct CArraySearchMatchField fields;
+ struct COptionf64 score;
+} SearchMatch;
+
typedef struct EnumOption {
const char *name;
struct Ty *ty;
@@ -1395,6 +1466,11 @@ typedef struct TaskProgress {
bool completed;
} TaskProgress;
+typedef struct SearchMatchField {
+ const char *key;
+ const char *value;
+} SearchMatchField;
+
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
@@ -2034,6 +2110,18 @@ struct Resultbool client_update_token_transfer_subscription(struct ToriiClient *
const struct U256 *token_ids,
uintptr_t token_ids_len);
+/**
+ * Performs a full-text search across indexed entities using FTS5
+ *
+ * # Parameters
+ * * `client` - Pointer to ToriiClient instance
+ * * `query` - Search query containing the search text and limit
+ *
+ * # Returns
+ * Result containing SearchResponse with results grouped by table or error
+ */
+struct ResultSearchResponse client_search(struct ToriiClient *client, struct SearchQuery query);
+
/**
* Serializes a string into a byte array
*
diff --git a/bindings/csharp/dojo.cs b/bindings/csharp/dojo.cs
index 4b08e7f..42541f3 100644
--- a/bindings/csharp/dojo.cs
+++ b/bindings/csharp/dojo.cs
@@ -888,6 +888,8 @@ public struct UniffiVTableCallbackInterfaceTransactionUpdateCallback
+
+
@@ -963,6 +965,10 @@ public static extern RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_publish
public static extern RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_publish_message_batch(IntPtr @ptr,RustBuffer @messages,ref UniffiRustCallStatus _uniffi_out_err
);
+ [DllImport("dojo_uniffi", CallingConvention = CallingConvention.Cdecl)]
+ public static extern RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_search(IntPtr @ptr,RustBuffer @query,ref UniffiRustCallStatus _uniffi_out_err
+ );
+
[DllImport("dojo_uniffi", CallingConvention = CallingConvention.Cdecl)]
public static extern RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_sql(IntPtr @ptr,RustBuffer @query,ref UniffiRustCallStatus _uniffi_out_err
);
@@ -1303,6 +1309,10 @@ public static extern ushort uniffi_dojo_uniffi_checksum_method_toriiclient_publi
public static extern ushort uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch(
);
+ [DllImport("dojo_uniffi", CallingConvention = CallingConvention.Cdecl)]
+ public static extern ushort uniffi_dojo_uniffi_checksum_method_toriiclient_search(
+ );
+
[DllImport("dojo_uniffi", CallingConvention = CallingConvention.Cdecl)]
public static extern ushort uniffi_dojo_uniffi_checksum_method_toriiclient_sql(
);
@@ -1483,6 +1493,12 @@ static void uniffiCheckApiChecksums() {
throw new UniffiContractChecksumException($"uniffi.dojo: uniffi bindings expected function `uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch` checksum `50961`, library returned `{checksum}`");
}
}
+ {
+ var checksum = _UniFFILib.uniffi_dojo_uniffi_checksum_method_toriiclient_search();
+ if (checksum != 24059) {
+ throw new UniffiContractChecksumException($"uniffi.dojo: uniffi bindings expected function `uniffi_dojo_uniffi_checksum_method_toriiclient_search` checksum `24059`, library returned `{checksum}`");
+ }
+ }
{
var checksum = _UniFFILib.uniffi_dojo_uniffi_checksum_method_toriiclient_sql();
if (checksum != 59851) {
@@ -1974,6 +1990,8 @@ internal interface IToriiClient {
///
string[] PublishMessageBatch(Message[] @messages);
///
+ SearchResponse Search(SearchQuery @query);
+ ///
SqlRow[] Sql(string @query);
///
PageEvent StarknetEvents(EventQuery @query);
@@ -2194,6 +2212,15 @@ public string[] PublishMessageBatch(Message[] @messages) {
}
+ ///
+ public SearchResponse Search(SearchQuery @query) {
+ return CallWithPointer(thisPtr => FfiConverterTypeSearchResponse.INSTANCE.Lift(
+ _UniffiHelpers.RustCallWithError(FfiConverterTypeDojoError.INSTANCE, (ref UniffiRustCallStatus _status) =>
+ _UniFFILib.uniffi_dojo_uniffi_fn_method_toriiclient_search(thisPtr, FfiConverterTypeSearchQuery.INSTANCE.Lower(@query), ref _status)
+)));
+ }
+
+
///
public SqlRow[] Sql(string @query) {
return CallWithPointer(thisPtr => FfiConverterSequenceTypeSqlRow.INSTANCE.Lift(
@@ -4080,6 +4107,130 @@ public override void Write(Query value, BigEndianStream stream) {
+internal record SearchField (
+ string @key,
+ string @value
+) {
+}
+
+class FfiConverterTypeSearchField: FfiConverterRustBuffer {
+ public static FfiConverterTypeSearchField INSTANCE = new FfiConverterTypeSearchField();
+
+ public override SearchField Read(BigEndianStream stream) {
+ return new SearchField(
+ @key: FfiConverterString.INSTANCE.Read(stream),
+ @value: FfiConverterString.INSTANCE.Read(stream)
+ );
+ }
+
+ public override int AllocationSize(SearchField value) {
+ return 0
+ + FfiConverterString.INSTANCE.AllocationSize(value.@key)
+ + FfiConverterString.INSTANCE.AllocationSize(value.@value);
+ }
+
+ public override void Write(SearchField value, BigEndianStream stream) {
+ FfiConverterString.INSTANCE.Write(value.@key, stream);
+ FfiConverterString.INSTANCE.Write(value.@value, stream);
+ }
+}
+
+
+
+internal record SearchMatch (
+ string @id,
+ SearchField[] @fields,
+ double? @score
+) {
+}
+
+class FfiConverterTypeSearchMatch: FfiConverterRustBuffer {
+ public static FfiConverterTypeSearchMatch INSTANCE = new FfiConverterTypeSearchMatch();
+
+ public override SearchMatch Read(BigEndianStream stream) {
+ return new SearchMatch(
+ @id: FfiConverterString.INSTANCE.Read(stream),
+ @fields: FfiConverterSequenceTypeSearchField.INSTANCE.Read(stream),
+ @score: FfiConverterOptionalDouble.INSTANCE.Read(stream)
+ );
+ }
+
+ public override int AllocationSize(SearchMatch value) {
+ return 0
+ + FfiConverterString.INSTANCE.AllocationSize(value.@id)
+ + FfiConverterSequenceTypeSearchField.INSTANCE.AllocationSize(value.@fields)
+ + FfiConverterOptionalDouble.INSTANCE.AllocationSize(value.@score);
+ }
+
+ public override void Write(SearchMatch value, BigEndianStream stream) {
+ FfiConverterString.INSTANCE.Write(value.@id, stream);
+ FfiConverterSequenceTypeSearchField.INSTANCE.Write(value.@fields, stream);
+ FfiConverterOptionalDouble.INSTANCE.Write(value.@score, stream);
+ }
+}
+
+
+
+internal record SearchQuery (
+ string @query,
+ uint @limit
+) {
+}
+
+class FfiConverterTypeSearchQuery: FfiConverterRustBuffer {
+ public static FfiConverterTypeSearchQuery INSTANCE = new FfiConverterTypeSearchQuery();
+
+ public override SearchQuery Read(BigEndianStream stream) {
+ return new SearchQuery(
+ @query: FfiConverterString.INSTANCE.Read(stream),
+ @limit: FfiConverterUInt32.INSTANCE.Read(stream)
+ );
+ }
+
+ public override int AllocationSize(SearchQuery value) {
+ return 0
+ + FfiConverterString.INSTANCE.AllocationSize(value.@query)
+ + FfiConverterUInt32.INSTANCE.AllocationSize(value.@limit);
+ }
+
+ public override void Write(SearchQuery value, BigEndianStream stream) {
+ FfiConverterString.INSTANCE.Write(value.@query, stream);
+ FfiConverterUInt32.INSTANCE.Write(value.@limit, stream);
+ }
+}
+
+
+
+internal record SearchResponse (
+ uint @total,
+ TableSearchResults[] @results
+) {
+}
+
+class FfiConverterTypeSearchResponse: FfiConverterRustBuffer {
+ public static FfiConverterTypeSearchResponse INSTANCE = new FfiConverterTypeSearchResponse();
+
+ public override SearchResponse Read(BigEndianStream stream) {
+ return new SearchResponse(
+ @total: FfiConverterUInt32.INSTANCE.Read(stream),
+ @results: FfiConverterSequenceTypeTableSearchResults.INSTANCE.Read(stream)
+ );
+ }
+
+ public override int AllocationSize(SearchResponse value) {
+ return 0
+ + FfiConverterUInt32.INSTANCE.AllocationSize(value.@total)
+ + FfiConverterSequenceTypeTableSearchResults.INSTANCE.AllocationSize(value.@results);
+ }
+
+ public override void Write(SearchResponse value, BigEndianStream stream) {
+ FfiConverterUInt32.INSTANCE.Write(value.@total, stream);
+ FfiConverterSequenceTypeTableSearchResults.INSTANCE.Write(value.@results, stream);
+ }
+}
+
+
+
internal record Signature (
FieldElement @r,
FieldElement @s
@@ -4196,6 +4347,40 @@ public override void Write(Struct value, BigEndianStream stream) {
+internal record TableSearchResults (
+ string @table,
+ uint @count,
+ SearchMatch[] @matches
+) {
+}
+
+class FfiConverterTypeTableSearchResults: FfiConverterRustBuffer {
+ public static FfiConverterTypeTableSearchResults INSTANCE = new FfiConverterTypeTableSearchResults();
+
+ public override TableSearchResults Read(BigEndianStream stream) {
+ return new TableSearchResults(
+ @table: FfiConverterString.INSTANCE.Read(stream),
+ @count: FfiConverterUInt32.INSTANCE.Read(stream),
+ @matches: FfiConverterSequenceTypeSearchMatch.INSTANCE.Read(stream)
+ );
+ }
+
+ public override int AllocationSize(TableSearchResults value) {
+ return 0
+ + FfiConverterString.INSTANCE.AllocationSize(value.@table)
+ + FfiConverterUInt32.INSTANCE.AllocationSize(value.@count)
+ + FfiConverterSequenceTypeSearchMatch.INSTANCE.AllocationSize(value.@matches);
+ }
+
+ public override void Write(TableSearchResults value, BigEndianStream stream) {
+ FfiConverterString.INSTANCE.Write(value.@table, stream);
+ FfiConverterUInt32.INSTANCE.Write(value.@count, stream);
+ FfiConverterSequenceTypeSearchMatch.INSTANCE.Write(value.@matches, stream);
+ }
+}
+
+
+
internal record TaskProgress (
string @taskId,
uint @count,
@@ -6467,6 +6652,37 @@ public override void Write(ulong? value, BigEndianStream stream) {
+class FfiConverterOptionalDouble: FfiConverterRustBuffer {
+ public static FfiConverterOptionalDouble INSTANCE = new FfiConverterOptionalDouble();
+
+ public override double? Read(BigEndianStream stream) {
+ if (stream.ReadByte() == 0) {
+ return null;
+ }
+ return FfiConverterDouble.INSTANCE.Read(stream);
+ }
+
+ public override int AllocationSize(double? value) {
+ if (value == null) {
+ return 1;
+ } else {
+ return 1 + FfiConverterDouble.INSTANCE.AllocationSize((double)value);
+ }
+ }
+
+ public override void Write(double? value, BigEndianStream stream) {
+ if (value == null) {
+ stream.WriteByte(0);
+ } else {
+ stream.WriteByte(1);
+ FfiConverterDouble.INSTANCE.Write((double)value, stream);
+ }
+ }
+}
+
+
+
+
class FfiConverterOptionalBoolean: FfiConverterRustBuffer {
public static FfiConverterOptionalBoolean INSTANCE = new FfiConverterOptionalBoolean();
@@ -7604,6 +7820,98 @@ public override void Write(PlayerAchievementProgress[] value, BigEndianStream st
+class FfiConverterSequenceTypeSearchField: FfiConverterRustBuffer {
+ public static FfiConverterSequenceTypeSearchField INSTANCE = new FfiConverterSequenceTypeSearchField();
+
+ public override SearchField[] Read(BigEndianStream stream) {
+ var length = stream.ReadInt();
+ if (length == 0) {
+ return [];
+ }
+
+ var result = new SearchField[(length)];
+ var readFn = FfiConverterTypeSearchField.INSTANCE.Read;
+ for (int i = 0; i < length; i++) {
+ result[i] = readFn(stream);
+ }
+ return result;
+ }
+
+ public override int AllocationSize(SearchField[] value) {
+ var sizeForLength = 4;
+
+ // details/1-empty-list-as-default-method-parameter.md
+ if (value == null) {
+ return sizeForLength;
+ }
+
+ var allocationSizeFn = FfiConverterTypeSearchField.INSTANCE.AllocationSize;
+ var sizeForItems = value.Sum(item => allocationSizeFn(item));
+ return sizeForLength + sizeForItems;
+ }
+
+ public override void Write(SearchField[] value, BigEndianStream stream) {
+ // details/1-empty-list-as-default-method-parameter.md
+ if (value == null) {
+ stream.WriteInt(0);
+ return;
+ }
+
+ stream.WriteInt(value.Length);
+ var writerFn = FfiConverterTypeSearchField.INSTANCE.Write;
+ value.ForEach(item => writerFn(item, stream));
+ }
+}
+
+
+
+
+class FfiConverterSequenceTypeSearchMatch: FfiConverterRustBuffer {
+ public static FfiConverterSequenceTypeSearchMatch INSTANCE = new FfiConverterSequenceTypeSearchMatch();
+
+ public override SearchMatch[] Read(BigEndianStream stream) {
+ var length = stream.ReadInt();
+ if (length == 0) {
+ return [];
+ }
+
+ var result = new SearchMatch[(length)];
+ var readFn = FfiConverterTypeSearchMatch.INSTANCE.Read;
+ for (int i = 0; i < length; i++) {
+ result[i] = readFn(stream);
+ }
+ return result;
+ }
+
+ public override int AllocationSize(SearchMatch[] value) {
+ var sizeForLength = 4;
+
+ // details/1-empty-list-as-default-method-parameter.md
+ if (value == null) {
+ return sizeForLength;
+ }
+
+ var allocationSizeFn = FfiConverterTypeSearchMatch.INSTANCE.AllocationSize;
+ var sizeForItems = value.Sum(item => allocationSizeFn(item));
+ return sizeForLength + sizeForItems;
+ }
+
+ public override void Write(SearchMatch[] value, BigEndianStream stream) {
+ // details/1-empty-list-as-default-method-parameter.md
+ if (value == null) {
+ stream.WriteInt(0);
+ return;
+ }
+
+ stream.WriteInt(value.Length);
+ var writerFn = FfiConverterTypeSearchMatch.INSTANCE.Write;
+ value.ForEach(item => writerFn(item, stream));
+ }
+}
+
+
+
+
class FfiConverterSequenceTypeSqlField: FfiConverterRustBuffer {
public static FfiConverterSequenceTypeSqlField INSTANCE = new FfiConverterSequenceTypeSqlField();
@@ -7742,6 +8050,52 @@ public override void Write(Struct[] value, BigEndianStream stream) {
+class FfiConverterSequenceTypeTableSearchResults: FfiConverterRustBuffer {
+ public static FfiConverterSequenceTypeTableSearchResults INSTANCE = new FfiConverterSequenceTypeTableSearchResults();
+
+ public override TableSearchResults[] Read(BigEndianStream stream) {
+ var length = stream.ReadInt();
+ if (length == 0) {
+ return [];
+ }
+
+ var result = new TableSearchResults[(length)];
+ var readFn = FfiConverterTypeTableSearchResults.INSTANCE.Read;
+ for (int i = 0; i < length; i++) {
+ result[i] = readFn(stream);
+ }
+ return result;
+ }
+
+ public override int AllocationSize(TableSearchResults[] value) {
+ var sizeForLength = 4;
+
+ // details/1-empty-list-as-default-method-parameter.md
+ if (value == null) {
+ return sizeForLength;
+ }
+
+ var allocationSizeFn = FfiConverterTypeTableSearchResults.INSTANCE.AllocationSize;
+ var sizeForItems = value.Sum(item => allocationSizeFn(item));
+ return sizeForLength + sizeForItems;
+ }
+
+ public override void Write(TableSearchResults[] value, BigEndianStream stream) {
+ // details/1-empty-list-as-default-method-parameter.md
+ if (value == null) {
+ stream.WriteInt(0);
+ return;
+ }
+
+ stream.WriteInt(value.Length);
+ var writerFn = FfiConverterTypeTableSearchResults.INSTANCE.Write;
+ value.ForEach(item => writerFn(item, stream));
+ }
+}
+
+
+
+
class FfiConverterSequenceTypeTaskProgress: FfiConverterRustBuffer {
public static FfiConverterSequenceTypeTaskProgress INSTANCE = new FfiConverterSequenceTypeTaskProgress();
diff --git a/bindings/go/dojo.go b/bindings/go/dojo.go
index 626cfe3..b242e04 100644
--- a/bindings/go/dojo.go
+++ b/bindings/go/dojo.go
@@ -1,7 +1,5 @@
package dojo
-// #cgo CFLAGS: -I${SRCDIR}
-// #cgo LDFLAGS: -L${SRCDIR}/../../target/release -ldojo_uniffi
// #include
import "C"
@@ -453,6 +451,15 @@ func uniffiCheckChecksums() {
panic("dojo: uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch: UniFFI API checksum mismatch")
}
}
+ {
+ checksum := rustCall(func(_uniffiStatus *C.RustCallStatus) C.uint16_t {
+ return C.uniffi_dojo_uniffi_checksum_method_toriiclient_search()
+ })
+ if checksum != 24059 {
+ // If this happens try cleaning and rebuilding your project
+ panic("dojo: uniffi_dojo_uniffi_checksum_method_toriiclient_search: UniFFI API checksum mismatch")
+ }
+ }
{
checksum := rustCall(func(_uniffiStatus *C.RustCallStatus) C.uint16_t {
return C.uniffi_dojo_uniffi_checksum_method_toriiclient_sql()
@@ -1052,6 +1059,7 @@ type ToriiClientInterface interface {
PlayerAchievements(query PlayerAchievementQuery) (PagePlayerAchievement, error)
PublishMessage(message Message) (string, error)
PublishMessageBatch(messages []Message) ([]string, error)
+ Search(query SearchQuery) (SearchResponse, error)
Sql(query string) ([]SqlRow, error)
StarknetEvents(query EventQuery) (PageEvent, error)
SubscribeEntityUpdates(clause *Clause, worldAddresses []FieldElement, callback EntityUpdateCallback) (uint64, error)
@@ -1275,6 +1283,23 @@ func (_self *ToriiClient) PublishMessageBatch(messages []Message) ([]string, err
}
}
+func (_self *ToriiClient) Search(query SearchQuery) (SearchResponse, error) {
+ _pointer := _self.ffiObject.incrementPointer("*ToriiClient")
+ defer _self.ffiObject.decrementPointer()
+ _uniffiRV, _uniffiErr := rustCallWithError[DojoError](FfiConverterDojoError{}, func(_uniffiStatus *C.RustCallStatus) RustBufferI {
+ return GoRustBuffer{
+ inner: C.uniffi_dojo_uniffi_fn_method_toriiclient_search(
+ _pointer, FfiConverterSearchQueryINSTANCE.Lower(query), _uniffiStatus),
+ }
+ })
+ if _uniffiErr != nil {
+ var _uniffiDefaultValue SearchResponse
+ return _uniffiDefaultValue, _uniffiErr
+ } else {
+ return FfiConverterSearchResponseINSTANCE.Lift(_uniffiRV), nil
+ }
+}
+
func (_self *ToriiClient) Sql(query string) ([]SqlRow, error) {
_pointer := _self.ffiObject.incrementPointer("*ToriiClient")
defer _self.ffiObject.decrementPointer()
@@ -3714,6 +3739,170 @@ func (_ FfiDestroyerQuery) Destroy(value Query) {
value.Destroy()
}
+type SearchField struct {
+ Key string
+ Value string
+}
+
+func (r *SearchField) Destroy() {
+ FfiDestroyerString{}.Destroy(r.Key)
+ FfiDestroyerString{}.Destroy(r.Value)
+}
+
+type FfiConverterSearchField struct{}
+
+var FfiConverterSearchFieldINSTANCE = FfiConverterSearchField{}
+
+func (c FfiConverterSearchField) Lift(rb RustBufferI) SearchField {
+ return LiftFromRustBuffer[SearchField](c, rb)
+}
+
+func (c FfiConverterSearchField) Read(reader io.Reader) SearchField {
+ return SearchField{
+ FfiConverterStringINSTANCE.Read(reader),
+ FfiConverterStringINSTANCE.Read(reader),
+ }
+}
+
+func (c FfiConverterSearchField) Lower(value SearchField) C.RustBuffer {
+ return LowerIntoRustBuffer[SearchField](c, value)
+}
+
+func (c FfiConverterSearchField) Write(writer io.Writer, value SearchField) {
+ FfiConverterStringINSTANCE.Write(writer, value.Key)
+ FfiConverterStringINSTANCE.Write(writer, value.Value)
+}
+
+type FfiDestroyerSearchField struct{}
+
+func (_ FfiDestroyerSearchField) Destroy(value SearchField) {
+ value.Destroy()
+}
+
+type SearchMatch struct {
+ Id string
+ Fields []SearchField
+ Score *float64
+}
+
+func (r *SearchMatch) Destroy() {
+ FfiDestroyerString{}.Destroy(r.Id)
+ FfiDestroyerSequenceSearchField{}.Destroy(r.Fields)
+ FfiDestroyerOptionalFloat64{}.Destroy(r.Score)
+}
+
+type FfiConverterSearchMatch struct{}
+
+var FfiConverterSearchMatchINSTANCE = FfiConverterSearchMatch{}
+
+func (c FfiConverterSearchMatch) Lift(rb RustBufferI) SearchMatch {
+ return LiftFromRustBuffer[SearchMatch](c, rb)
+}
+
+func (c FfiConverterSearchMatch) Read(reader io.Reader) SearchMatch {
+ return SearchMatch{
+ FfiConverterStringINSTANCE.Read(reader),
+ FfiConverterSequenceSearchFieldINSTANCE.Read(reader),
+ FfiConverterOptionalFloat64INSTANCE.Read(reader),
+ }
+}
+
+func (c FfiConverterSearchMatch) Lower(value SearchMatch) C.RustBuffer {
+ return LowerIntoRustBuffer[SearchMatch](c, value)
+}
+
+func (c FfiConverterSearchMatch) Write(writer io.Writer, value SearchMatch) {
+ FfiConverterStringINSTANCE.Write(writer, value.Id)
+ FfiConverterSequenceSearchFieldINSTANCE.Write(writer, value.Fields)
+ FfiConverterOptionalFloat64INSTANCE.Write(writer, value.Score)
+}
+
+type FfiDestroyerSearchMatch struct{}
+
+func (_ FfiDestroyerSearchMatch) Destroy(value SearchMatch) {
+ value.Destroy()
+}
+
+type SearchQuery struct {
+ Query string
+ Limit uint32
+}
+
+func (r *SearchQuery) Destroy() {
+ FfiDestroyerString{}.Destroy(r.Query)
+ FfiDestroyerUint32{}.Destroy(r.Limit)
+}
+
+type FfiConverterSearchQuery struct{}
+
+var FfiConverterSearchQueryINSTANCE = FfiConverterSearchQuery{}
+
+func (c FfiConverterSearchQuery) Lift(rb RustBufferI) SearchQuery {
+ return LiftFromRustBuffer[SearchQuery](c, rb)
+}
+
+func (c FfiConverterSearchQuery) Read(reader io.Reader) SearchQuery {
+ return SearchQuery{
+ FfiConverterStringINSTANCE.Read(reader),
+ FfiConverterUint32INSTANCE.Read(reader),
+ }
+}
+
+func (c FfiConverterSearchQuery) Lower(value SearchQuery) C.RustBuffer {
+ return LowerIntoRustBuffer[SearchQuery](c, value)
+}
+
+func (c FfiConverterSearchQuery) Write(writer io.Writer, value SearchQuery) {
+ FfiConverterStringINSTANCE.Write(writer, value.Query)
+ FfiConverterUint32INSTANCE.Write(writer, value.Limit)
+}
+
+type FfiDestroyerSearchQuery struct{}
+
+func (_ FfiDestroyerSearchQuery) Destroy(value SearchQuery) {
+ value.Destroy()
+}
+
+type SearchResponse struct {
+ Total uint32
+ Results []TableSearchResults
+}
+
+func (r *SearchResponse) Destroy() {
+ FfiDestroyerUint32{}.Destroy(r.Total)
+ FfiDestroyerSequenceTableSearchResults{}.Destroy(r.Results)
+}
+
+type FfiConverterSearchResponse struct{}
+
+var FfiConverterSearchResponseINSTANCE = FfiConverterSearchResponse{}
+
+func (c FfiConverterSearchResponse) Lift(rb RustBufferI) SearchResponse {
+ return LiftFromRustBuffer[SearchResponse](c, rb)
+}
+
+func (c FfiConverterSearchResponse) Read(reader io.Reader) SearchResponse {
+ return SearchResponse{
+ FfiConverterUint32INSTANCE.Read(reader),
+ FfiConverterSequenceTableSearchResultsINSTANCE.Read(reader),
+ }
+}
+
+func (c FfiConverterSearchResponse) Lower(value SearchResponse) C.RustBuffer {
+ return LowerIntoRustBuffer[SearchResponse](c, value)
+}
+
+func (c FfiConverterSearchResponse) Write(writer io.Writer, value SearchResponse) {
+ FfiConverterUint32INSTANCE.Write(writer, value.Total)
+ FfiConverterSequenceTableSearchResultsINSTANCE.Write(writer, value.Results)
+}
+
+type FfiDestroyerSearchResponse struct{}
+
+func (_ FfiDestroyerSearchResponse) Destroy(value SearchResponse) {
+ value.Destroy()
+}
+
type Signature struct {
R FieldElement
S FieldElement
@@ -3870,6 +4059,50 @@ func (_ FfiDestroyerStruct) Destroy(value Struct) {
value.Destroy()
}
+type TableSearchResults struct {
+ Table string
+ Count uint32
+ Matches []SearchMatch
+}
+
+func (r *TableSearchResults) Destroy() {
+ FfiDestroyerString{}.Destroy(r.Table)
+ FfiDestroyerUint32{}.Destroy(r.Count)
+ FfiDestroyerSequenceSearchMatch{}.Destroy(r.Matches)
+}
+
+type FfiConverterTableSearchResults struct{}
+
+var FfiConverterTableSearchResultsINSTANCE = FfiConverterTableSearchResults{}
+
+func (c FfiConverterTableSearchResults) Lift(rb RustBufferI) TableSearchResults {
+ return LiftFromRustBuffer[TableSearchResults](c, rb)
+}
+
+func (c FfiConverterTableSearchResults) Read(reader io.Reader) TableSearchResults {
+ return TableSearchResults{
+ FfiConverterStringINSTANCE.Read(reader),
+ FfiConverterUint32INSTANCE.Read(reader),
+ FfiConverterSequenceSearchMatchINSTANCE.Read(reader),
+ }
+}
+
+func (c FfiConverterTableSearchResults) Lower(value TableSearchResults) C.RustBuffer {
+ return LowerIntoRustBuffer[TableSearchResults](c, value)
+}
+
+func (c FfiConverterTableSearchResults) Write(writer io.Writer, value TableSearchResults) {
+ FfiConverterStringINSTANCE.Write(writer, value.Table)
+ FfiConverterUint32INSTANCE.Write(writer, value.Count)
+ FfiConverterSequenceSearchMatchINSTANCE.Write(writer, value.Matches)
+}
+
+type FfiDestroyerTableSearchResults struct{}
+
+func (_ FfiDestroyerTableSearchResults) Destroy(value TableSearchResults) {
+ value.Destroy()
+}
+
type TaskProgress struct {
TaskId string
Count uint32
@@ -6479,6 +6712,43 @@ func (_ FfiDestroyerOptionalUint64) Destroy(value *uint64) {
}
}
+type FfiConverterOptionalFloat64 struct{}
+
+var FfiConverterOptionalFloat64INSTANCE = FfiConverterOptionalFloat64{}
+
+func (c FfiConverterOptionalFloat64) Lift(rb RustBufferI) *float64 {
+ return LiftFromRustBuffer[*float64](c, rb)
+}
+
+func (_ FfiConverterOptionalFloat64) Read(reader io.Reader) *float64 {
+ if readInt8(reader) == 0 {
+ return nil
+ }
+ temp := FfiConverterFloat64INSTANCE.Read(reader)
+ return &temp
+}
+
+func (c FfiConverterOptionalFloat64) Lower(value *float64) C.RustBuffer {
+ return LowerIntoRustBuffer[*float64](c, value)
+}
+
+func (_ FfiConverterOptionalFloat64) Write(writer io.Writer, value *float64) {
+ if value == nil {
+ writeInt8(writer, 0)
+ } else {
+ writeInt8(writer, 1)
+ FfiConverterFloat64INSTANCE.Write(writer, *value)
+ }
+}
+
+type FfiDestroyerOptionalFloat64 struct{}
+
+func (_ FfiDestroyerOptionalFloat64) Destroy(value *float64) {
+ if value != nil {
+ FfiDestroyerFloat64{}.Destroy(*value)
+ }
+}
+
type FfiConverterOptionalBool struct{}
var FfiConverterOptionalBoolINSTANCE = FfiConverterOptionalBool{}
@@ -7598,6 +7868,92 @@ func (FfiDestroyerSequencePlayerAchievementProgress) Destroy(sequence []PlayerAc
}
}
+type FfiConverterSequenceSearchField struct{}
+
+var FfiConverterSequenceSearchFieldINSTANCE = FfiConverterSequenceSearchField{}
+
+func (c FfiConverterSequenceSearchField) Lift(rb RustBufferI) []SearchField {
+ return LiftFromRustBuffer[[]SearchField](c, rb)
+}
+
+func (c FfiConverterSequenceSearchField) Read(reader io.Reader) []SearchField {
+ length := readInt32(reader)
+ if length == 0 {
+ return nil
+ }
+ result := make([]SearchField, 0, length)
+ for i := int32(0); i < length; i++ {
+ result = append(result, FfiConverterSearchFieldINSTANCE.Read(reader))
+ }
+ return result
+}
+
+func (c FfiConverterSequenceSearchField) Lower(value []SearchField) C.RustBuffer {
+ return LowerIntoRustBuffer[[]SearchField](c, value)
+}
+
+func (c FfiConverterSequenceSearchField) Write(writer io.Writer, value []SearchField) {
+ if len(value) > math.MaxInt32 {
+ panic("[]SearchField is too large to fit into Int32")
+ }
+
+ writeInt32(writer, int32(len(value)))
+ for _, item := range value {
+ FfiConverterSearchFieldINSTANCE.Write(writer, item)
+ }
+}
+
+type FfiDestroyerSequenceSearchField struct{}
+
+func (FfiDestroyerSequenceSearchField) Destroy(sequence []SearchField) {
+ for _, value := range sequence {
+ FfiDestroyerSearchField{}.Destroy(value)
+ }
+}
+
+type FfiConverterSequenceSearchMatch struct{}
+
+var FfiConverterSequenceSearchMatchINSTANCE = FfiConverterSequenceSearchMatch{}
+
+func (c FfiConverterSequenceSearchMatch) Lift(rb RustBufferI) []SearchMatch {
+ return LiftFromRustBuffer[[]SearchMatch](c, rb)
+}
+
+func (c FfiConverterSequenceSearchMatch) Read(reader io.Reader) []SearchMatch {
+ length := readInt32(reader)
+ if length == 0 {
+ return nil
+ }
+ result := make([]SearchMatch, 0, length)
+ for i := int32(0); i < length; i++ {
+ result = append(result, FfiConverterSearchMatchINSTANCE.Read(reader))
+ }
+ return result
+}
+
+func (c FfiConverterSequenceSearchMatch) Lower(value []SearchMatch) C.RustBuffer {
+ return LowerIntoRustBuffer[[]SearchMatch](c, value)
+}
+
+func (c FfiConverterSequenceSearchMatch) Write(writer io.Writer, value []SearchMatch) {
+ if len(value) > math.MaxInt32 {
+ panic("[]SearchMatch is too large to fit into Int32")
+ }
+
+ writeInt32(writer, int32(len(value)))
+ for _, item := range value {
+ FfiConverterSearchMatchINSTANCE.Write(writer, item)
+ }
+}
+
+type FfiDestroyerSequenceSearchMatch struct{}
+
+func (FfiDestroyerSequenceSearchMatch) Destroy(sequence []SearchMatch) {
+ for _, value := range sequence {
+ FfiDestroyerSearchMatch{}.Destroy(value)
+ }
+}
+
type FfiConverterSequenceSqlField struct{}
var FfiConverterSequenceSqlFieldINSTANCE = FfiConverterSequenceSqlField{}
@@ -7727,6 +8083,49 @@ func (FfiDestroyerSequenceStruct) Destroy(sequence []Struct) {
}
}
+type FfiConverterSequenceTableSearchResults struct{}
+
+var FfiConverterSequenceTableSearchResultsINSTANCE = FfiConverterSequenceTableSearchResults{}
+
+func (c FfiConverterSequenceTableSearchResults) Lift(rb RustBufferI) []TableSearchResults {
+ return LiftFromRustBuffer[[]TableSearchResults](c, rb)
+}
+
+func (c FfiConverterSequenceTableSearchResults) Read(reader io.Reader) []TableSearchResults {
+ length := readInt32(reader)
+ if length == 0 {
+ return nil
+ }
+ result := make([]TableSearchResults, 0, length)
+ for i := int32(0); i < length; i++ {
+ result = append(result, FfiConverterTableSearchResultsINSTANCE.Read(reader))
+ }
+ return result
+}
+
+func (c FfiConverterSequenceTableSearchResults) Lower(value []TableSearchResults) C.RustBuffer {
+ return LowerIntoRustBuffer[[]TableSearchResults](c, value)
+}
+
+func (c FfiConverterSequenceTableSearchResults) Write(writer io.Writer, value []TableSearchResults) {
+ if len(value) > math.MaxInt32 {
+ panic("[]TableSearchResults is too large to fit into Int32")
+ }
+
+ writeInt32(writer, int32(len(value)))
+ for _, item := range value {
+ FfiConverterTableSearchResultsINSTANCE.Write(writer, item)
+ }
+}
+
+type FfiDestroyerSequenceTableSearchResults struct{}
+
+func (FfiDestroyerSequenceTableSearchResults) Destroy(sequence []TableSearchResults) {
+ for _, value := range sequence {
+ FfiDestroyerTableSearchResults{}.Destroy(value)
+ }
+}
+
type FfiConverterSequenceTaskProgress struct{}
var FfiConverterSequenceTaskProgressINSTANCE = FfiConverterSequenceTaskProgress{}
diff --git a/bindings/go/dojo.h b/bindings/go/dojo.h
index 7b3198a..370b7b5 100644
--- a/bindings/go/dojo.h
+++ b/bindings/go/dojo.h
@@ -638,6 +638,11 @@ RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_publish_message(void* ptr, R
RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_publish_message_batch(void* ptr, RustBuffer messages, RustCallStatus *out_status
);
#endif
+#ifndef UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_FN_METHOD_TORIICLIENT_SEARCH
+#define UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_FN_METHOD_TORIICLIENT_SEARCH
+RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_search(void* ptr, RustBuffer query, RustCallStatus *out_status
+);
+#endif
#ifndef UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_FN_METHOD_TORIICLIENT_SQL
#define UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_FN_METHOD_TORIICLIENT_SQL
RustBuffer uniffi_dojo_uniffi_fn_method_toriiclient_sql(void* ptr, RustBuffer query, RustCallStatus *out_status
@@ -1072,6 +1077,12 @@ uint16_t uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message(void
#define UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_CHECKSUM_METHOD_TORIICLIENT_PUBLISH_MESSAGE_BATCH
uint16_t uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch(void
+);
+#endif
+#ifndef UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_CHECKSUM_METHOD_TORIICLIENT_SEARCH
+#define UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_CHECKSUM_METHOD_TORIICLIENT_SEARCH
+uint16_t uniffi_dojo_uniffi_checksum_method_toriiclient_search(void
+
);
#endif
#ifndef UNIFFI_FFIDEF_UNIFFI_DOJO_UNIFFI_CHECKSUM_METHOD_TORIICLIENT_SQL
diff --git a/bindings/kotlin/com/dojoengine/dojo/dojo.kt b/bindings/kotlin/com/dojoengine/dojo/dojo.kt
index 360551d..31f6776 100644
--- a/bindings/kotlin/com/dojoengine/dojo/dojo.kt
+++ b/bindings/kotlin/com/dojoengine/dojo/dojo.kt
@@ -796,6 +796,8 @@ internal object IntegrityCheckingUniffiLib {
): Short
external fun uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch(
): Short
+ external fun uniffi_dojo_uniffi_checksum_method_toriiclient_search(
+ ): Short
external fun uniffi_dojo_uniffi_checksum_method_toriiclient_sql(
): Short
external fun uniffi_dojo_uniffi_checksum_method_toriiclient_starknet_events(
@@ -899,6 +901,8 @@ external fun uniffi_dojo_uniffi_fn_method_toriiclient_publish_message(`ptr`: Lon
): RustBuffer.ByValue
external fun uniffi_dojo_uniffi_fn_method_toriiclient_publish_message_batch(`ptr`: Long,`messages`: RustBuffer.ByValue,uniffi_out_err: UniffiRustCallStatus,
): RustBuffer.ByValue
+external fun uniffi_dojo_uniffi_fn_method_toriiclient_search(`ptr`: Long,`query`: RustBuffer.ByValue,uniffi_out_err: UniffiRustCallStatus,
+): RustBuffer.ByValue
external fun uniffi_dojo_uniffi_fn_method_toriiclient_sql(`ptr`: Long,`query`: RustBuffer.ByValue,uniffi_out_err: UniffiRustCallStatus,
): RustBuffer.ByValue
external fun uniffi_dojo_uniffi_fn_method_toriiclient_starknet_events(`ptr`: Long,`query`: RustBuffer.ByValue,uniffi_out_err: UniffiRustCallStatus,
@@ -1087,6 +1091,9 @@ private fun uniffiCheckApiChecksums(lib: IntegrityCheckingUniffiLib) {
if (lib.uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch() != 2146.toShort()) {
throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
}
+ if (lib.uniffi_dojo_uniffi_checksum_method_toriiclient_search() != 20622.toShort()) {
+ throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
+ }
if (lib.uniffi_dojo_uniffi_checksum_method_toriiclient_sql() != 38286.toShort()) {
throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
}
@@ -1758,6 +1765,8 @@ public interface ToriiClientInterface {
fun `publishMessageBatch`(`messages`: List): List
+ fun `search`(`query`: SearchQuery): SearchResponse
+
fun `sql`(`query`: kotlin.String): List
fun `starknetEvents`(`query`: EventQuery): PageEvent
@@ -2045,6 +2054,20 @@ open class ToriiClient: Disposable, AutoCloseable, ToriiClientInterface
+ @Throws(DojoException::class)override fun `search`(`query`: SearchQuery): SearchResponse {
+ return FfiConverterTypeSearchResponse.lift(
+ callWithHandle {
+ uniffiRustCallWithError(DojoException) { _status ->
+ UniffiLib.uniffi_dojo_uniffi_fn_method_toriiclient_search(
+ it,
+ FfiConverterTypeSearchQuery.lower(`query`),_status)
+}
+ }
+ )
+ }
+
+
+
@Throws(DojoException::class)override fun `sql`(`query`: kotlin.String): List {
return FfiConverterSequenceTypeSqlRow.lift(
callWithHandle {
@@ -4371,6 +4394,155 @@ public object FfiConverterTypeQuery: FfiConverterRustBuffer {
+data class SearchField (
+ val `key`: kotlin.String
+ ,
+ val `value`: kotlin.String
+
+){
+
+
+
+ companion object
+}
+
+/**
+ * @suppress
+ */
+public object FfiConverterTypeSearchField: FfiConverterRustBuffer {
+ override fun read(buf: ByteBuffer): SearchField {
+ return SearchField(
+ FfiConverterString.read(buf),
+ FfiConverterString.read(buf),
+ )
+ }
+
+ override fun allocationSize(value: SearchField) = (
+ FfiConverterString.allocationSize(value.`key`) +
+ FfiConverterString.allocationSize(value.`value`)
+ )
+
+ override fun write(value: SearchField, buf: ByteBuffer) {
+ FfiConverterString.write(value.`key`, buf)
+ FfiConverterString.write(value.`value`, buf)
+ }
+}
+
+
+
+data class SearchMatch (
+ val `id`: kotlin.String
+ ,
+ val `fields`: List
+ ,
+ val `score`: kotlin.Double?
+
+){
+
+
+
+ companion object
+}
+
+/**
+ * @suppress
+ */
+public object FfiConverterTypeSearchMatch: FfiConverterRustBuffer {
+ override fun read(buf: ByteBuffer): SearchMatch {
+ return SearchMatch(
+ FfiConverterString.read(buf),
+ FfiConverterSequenceTypeSearchField.read(buf),
+ FfiConverterOptionalDouble.read(buf),
+ )
+ }
+
+ override fun allocationSize(value: SearchMatch) = (
+ FfiConverterString.allocationSize(value.`id`) +
+ FfiConverterSequenceTypeSearchField.allocationSize(value.`fields`) +
+ FfiConverterOptionalDouble.allocationSize(value.`score`)
+ )
+
+ override fun write(value: SearchMatch, buf: ByteBuffer) {
+ FfiConverterString.write(value.`id`, buf)
+ FfiConverterSequenceTypeSearchField.write(value.`fields`, buf)
+ FfiConverterOptionalDouble.write(value.`score`, buf)
+ }
+}
+
+
+
+data class SearchQuery (
+ val `query`: kotlin.String
+ ,
+ val `limit`: kotlin.UInt
+
+){
+
+
+
+ companion object
+}
+
+/**
+ * @suppress
+ */
+public object FfiConverterTypeSearchQuery: FfiConverterRustBuffer {
+ override fun read(buf: ByteBuffer): SearchQuery {
+ return SearchQuery(
+ FfiConverterString.read(buf),
+ FfiConverterUInt.read(buf),
+ )
+ }
+
+ override fun allocationSize(value: SearchQuery) = (
+ FfiConverterString.allocationSize(value.`query`) +
+ FfiConverterUInt.allocationSize(value.`limit`)
+ )
+
+ override fun write(value: SearchQuery, buf: ByteBuffer) {
+ FfiConverterString.write(value.`query`, buf)
+ FfiConverterUInt.write(value.`limit`, buf)
+ }
+}
+
+
+
+data class SearchResponse (
+ val `total`: kotlin.UInt
+ ,
+ val `results`: List
+
+){
+
+
+
+ companion object
+}
+
+/**
+ * @suppress
+ */
+public object FfiConverterTypeSearchResponse: FfiConverterRustBuffer {
+ override fun read(buf: ByteBuffer): SearchResponse {
+ return SearchResponse(
+ FfiConverterUInt.read(buf),
+ FfiConverterSequenceTypeTableSearchResults.read(buf),
+ )
+ }
+
+ override fun allocationSize(value: SearchResponse) = (
+ FfiConverterUInt.allocationSize(value.`total`) +
+ FfiConverterSequenceTypeTableSearchResults.allocationSize(value.`results`)
+ )
+
+ override fun write(value: SearchResponse, buf: ByteBuffer) {
+ FfiConverterUInt.write(value.`total`, buf)
+ FfiConverterSequenceTypeTableSearchResults.write(value.`results`, buf)
+ }
+}
+
+
+
data class Signature (
val `r`: FieldElement
,
@@ -4510,6 +4682,47 @@ public object FfiConverterTypeStruct: FfiConverterRustBuffer {
+data class TableSearchResults (
+ val `table`: kotlin.String
+ ,
+ val `count`: kotlin.UInt
+ ,
+ val `matches`: List
+
+){
+
+
+
+ companion object
+}
+
+/**
+ * @suppress
+ */
+public object FfiConverterTypeTableSearchResults: FfiConverterRustBuffer {
+ override fun read(buf: ByteBuffer): TableSearchResults {
+ return TableSearchResults(
+ FfiConverterString.read(buf),
+ FfiConverterUInt.read(buf),
+ FfiConverterSequenceTypeSearchMatch.read(buf),
+ )
+ }
+
+ override fun allocationSize(value: TableSearchResults) = (
+ FfiConverterString.allocationSize(value.`table`) +
+ FfiConverterUInt.allocationSize(value.`count`) +
+ FfiConverterSequenceTypeSearchMatch.allocationSize(value.`matches`)
+ )
+
+ override fun write(value: TableSearchResults, buf: ByteBuffer) {
+ FfiConverterString.write(value.`table`, buf)
+ FfiConverterUInt.write(value.`count`, buf)
+ FfiConverterSequenceTypeSearchMatch.write(value.`matches`, buf)
+ }
+}
+
+
+
data class TaskProgress (
val `taskId`: kotlin.String
,
@@ -7116,6 +7329,38 @@ public object FfiConverterOptionalULong: FfiConverterRustBuffer {
+/**
+ * @suppress
+ */
+public object FfiConverterOptionalDouble: FfiConverterRustBuffer {
+ override fun read(buf: ByteBuffer): kotlin.Double? {
+ if (buf.get().toInt() == 0) {
+ return null
+ }
+ return FfiConverterDouble.read(buf)
+ }
+
+ override fun allocationSize(value: kotlin.Double?): ULong {
+ if (value == null) {
+ return 1UL
+ } else {
+ return 1UL + FfiConverterDouble.allocationSize(value)
+ }
+ }
+
+ override fun write(value: kotlin.Double?, buf: ByteBuffer) {
+ if (value == null) {
+ buf.put(0)
+ } else {
+ buf.put(1)
+ FfiConverterDouble.write(value, buf)
+ }
+ }
+}
+
+
+
+
/**
* @suppress
*/
@@ -7900,6 +8145,62 @@ public object FfiConverterSequenceTypePlayerAchievementProgress: FfiConverterRus
+/**
+ * @suppress
+ */
+public object FfiConverterSequenceTypeSearchField: FfiConverterRustBuffer> {
+ override fun read(buf: ByteBuffer): List {
+ val len = buf.getInt()
+ return List(len) {
+ FfiConverterTypeSearchField.read(buf)
+ }
+ }
+
+ override fun allocationSize(value: List): ULong {
+ val sizeForLength = 4UL
+ val sizeForItems = value.map { FfiConverterTypeSearchField.allocationSize(it) }.sum()
+ return sizeForLength + sizeForItems
+ }
+
+ override fun write(value: List, buf: ByteBuffer) {
+ buf.putInt(value.size)
+ value.iterator().forEach {
+ FfiConverterTypeSearchField.write(it, buf)
+ }
+ }
+}
+
+
+
+
+/**
+ * @suppress
+ */
+public object FfiConverterSequenceTypeSearchMatch: FfiConverterRustBuffer> {
+ override fun read(buf: ByteBuffer): List {
+ val len = buf.getInt()
+ return List(len) {
+ FfiConverterTypeSearchMatch.read(buf)
+ }
+ }
+
+ override fun allocationSize(value: List): ULong {
+ val sizeForLength = 4UL
+ val sizeForItems = value.map { FfiConverterTypeSearchMatch.allocationSize(it) }.sum()
+ return sizeForLength + sizeForItems
+ }
+
+ override fun write(value: List, buf: ByteBuffer) {
+ buf.putInt(value.size)
+ value.iterator().forEach {
+ FfiConverterTypeSearchMatch.write(it, buf)
+ }
+ }
+}
+
+
+
+
/**
* @suppress
*/
@@ -7984,6 +8285,34 @@ public object FfiConverterSequenceTypeStruct: FfiConverterRustBuffer> {
+ override fun read(buf: ByteBuffer): List {
+ val len = buf.getInt()
+ return List(len) {
+ FfiConverterTypeTableSearchResults.read(buf)
+ }
+ }
+
+ override fun allocationSize(value: List): ULong {
+ val sizeForLength = 4UL
+ val sizeForItems = value.map { FfiConverterTypeTableSearchResults.allocationSize(it) }.sum()
+ return sizeForLength + sizeForItems
+ }
+
+ override fun write(value: List, buf: ByteBuffer) {
+ buf.putInt(value.size)
+ value.iterator().forEach {
+ FfiConverterTypeTableSearchResults.write(it, buf)
+ }
+ }
+}
+
+
+
+
/**
* @suppress
*/
diff --git a/bindings/python/dojo.py b/bindings/python/dojo.py
index bf62c91..3d002b7 100644
--- a/bindings/python/dojo.py
+++ b/bindings/python/dojo.py
@@ -535,6 +535,8 @@ def _uniffi_check_api_checksums(lib):
raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
if lib.uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch() != 10937:
raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
+ if lib.uniffi_dojo_uniffi_checksum_method_toriiclient_search() != 24151:
+ raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
if lib.uniffi_dojo_uniffi_checksum_method_toriiclient_sql() != 38880:
raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
if lib.uniffi_dojo_uniffi_checksum_method_toriiclient_starknet_events() != 49963:
@@ -1016,6 +1018,12 @@ class _UniffiVTableCallbackInterfaceDojoTransactionUpdateCallback(ctypes.Structu
ctypes.POINTER(_UniffiRustCallStatus),
)
_UniffiLib.uniffi_dojo_uniffi_fn_method_toriiclient_publish_message_batch.restype = _UniffiRustBuffer
+_UniffiLib.uniffi_dojo_uniffi_fn_method_toriiclient_search.argtypes = (
+ ctypes.c_uint64,
+ _UniffiRustBuffer,
+ ctypes.POINTER(_UniffiRustCallStatus),
+)
+_UniffiLib.uniffi_dojo_uniffi_fn_method_toriiclient_search.restype = _UniffiRustBuffer
_UniffiLib.uniffi_dojo_uniffi_fn_method_toriiclient_sql.argtypes = (
ctypes.c_uint64,
_UniffiRustBuffer,
@@ -1145,6 +1153,9 @@ class _UniffiVTableCallbackInterfaceDojoTransactionUpdateCallback(ctypes.Structu
_UniffiLib.uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch.argtypes = (
)
_UniffiLib.uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch.restype = ctypes.c_uint16
+_UniffiLib.uniffi_dojo_uniffi_checksum_method_toriiclient_search.argtypes = (
+)
+_UniffiLib.uniffi_dojo_uniffi_checksum_method_toriiclient_search.restype = ctypes.c_uint16
_UniffiLib.uniffi_dojo_uniffi_checksum_method_toriiclient_sql.argtypes = (
)
_UniffiLib.uniffi_dojo_uniffi_checksum_method_toriiclient_sql.restype = ctypes.c_uint16
@@ -6610,6 +6621,292 @@ def write(value, buf):
_UniffiFfiConverterSequenceString.write(value.models, buf)
_UniffiFfiConverterBoolean.write(value.historical, buf)
+@dataclass
+class SearchField:
+ def __init__(self, *, key:str, value:str):
+ self.key = key
+ self.value = value
+
+
+
+
+ def __str__(self):
+ return "SearchField(key={}, value={})".format(self.key, self.value)
+ def __eq__(self, other):
+ if self.key != other.key:
+ return False
+ if self.value != other.value:
+ return False
+ return True
+
+class _UniffiFfiConverterTypeSearchField(_UniffiConverterRustBuffer):
+ @staticmethod
+ def read(buf):
+ return SearchField(
+ key=_UniffiFfiConverterString.read(buf),
+ value=_UniffiFfiConverterString.read(buf),
+ )
+
+ @staticmethod
+ def check_lower(value):
+ _UniffiFfiConverterString.check_lower(value.key)
+ _UniffiFfiConverterString.check_lower(value.value)
+
+ @staticmethod
+ def write(value, buf):
+ _UniffiFfiConverterString.write(value.key, buf)
+ _UniffiFfiConverterString.write(value.value, buf)
+
+class _UniffiFfiConverterSequenceTypeSearchField(_UniffiConverterRustBuffer):
+ @classmethod
+ def check_lower(cls, value):
+ for item in value:
+ _UniffiFfiConverterTypeSearchField.check_lower(item)
+
+ @classmethod
+ def write(cls, value, buf):
+ items = len(value)
+ buf.write_i32(items)
+ for item in value:
+ _UniffiFfiConverterTypeSearchField.write(item, buf)
+
+ @classmethod
+ def read(cls, buf):
+ count = buf.read_i32()
+ if count < 0:
+ raise InternalError("Unexpected negative sequence length")
+
+ return [
+ _UniffiFfiConverterTypeSearchField.read(buf) for i in range(count)
+ ]
+
+class _UniffiFfiConverterOptionalFloat64(_UniffiConverterRustBuffer):
+ @classmethod
+ def check_lower(cls, value):
+ if value is not None:
+ _UniffiFfiConverterFloat64.check_lower(value)
+
+ @classmethod
+ def write(cls, value, buf):
+ if value is None:
+ buf.write_u8(0)
+ return
+
+ buf.write_u8(1)
+ _UniffiFfiConverterFloat64.write(value, buf)
+
+ @classmethod
+ def read(cls, buf):
+ flag = buf.read_u8()
+ if flag == 0:
+ return None
+ elif flag == 1:
+ return _UniffiFfiConverterFloat64.read(buf)
+ else:
+ raise InternalError("Unexpected flag byte for optional type")
+
+@dataclass
+class SearchMatch:
+ def __init__(self, *, id:str, fields:typing.List[SearchField], score:typing.Optional[float]):
+ self.id = id
+ self.fields = fields
+ self.score = score
+
+
+
+
+ def __str__(self):
+ return "SearchMatch(id={}, fields={}, score={})".format(self.id, self.fields, self.score)
+ def __eq__(self, other):
+ if self.id != other.id:
+ return False
+ if self.fields != other.fields:
+ return False
+ if self.score != other.score:
+ return False
+ return True
+
+class _UniffiFfiConverterTypeSearchMatch(_UniffiConverterRustBuffer):
+ @staticmethod
+ def read(buf):
+ return SearchMatch(
+ id=_UniffiFfiConverterString.read(buf),
+ fields=_UniffiFfiConverterSequenceTypeSearchField.read(buf),
+ score=_UniffiFfiConverterOptionalFloat64.read(buf),
+ )
+
+ @staticmethod
+ def check_lower(value):
+ _UniffiFfiConverterString.check_lower(value.id)
+ _UniffiFfiConverterSequenceTypeSearchField.check_lower(value.fields)
+ _UniffiFfiConverterOptionalFloat64.check_lower(value.score)
+
+ @staticmethod
+ def write(value, buf):
+ _UniffiFfiConverterString.write(value.id, buf)
+ _UniffiFfiConverterSequenceTypeSearchField.write(value.fields, buf)
+ _UniffiFfiConverterOptionalFloat64.write(value.score, buf)
+
+@dataclass
+class SearchQuery:
+ def __init__(self, *, query:str, limit:int):
+ self.query = query
+ self.limit = limit
+
+
+
+
+ def __str__(self):
+ return "SearchQuery(query={}, limit={})".format(self.query, self.limit)
+ def __eq__(self, other):
+ if self.query != other.query:
+ return False
+ if self.limit != other.limit:
+ return False
+ return True
+
+class _UniffiFfiConverterTypeSearchQuery(_UniffiConverterRustBuffer):
+ @staticmethod
+ def read(buf):
+ return SearchQuery(
+ query=_UniffiFfiConverterString.read(buf),
+ limit=_UniffiFfiConverterUInt32.read(buf),
+ )
+
+ @staticmethod
+ def check_lower(value):
+ _UniffiFfiConverterString.check_lower(value.query)
+ _UniffiFfiConverterUInt32.check_lower(value.limit)
+
+ @staticmethod
+ def write(value, buf):
+ _UniffiFfiConverterString.write(value.query, buf)
+ _UniffiFfiConverterUInt32.write(value.limit, buf)
+
+class _UniffiFfiConverterSequenceTypeSearchMatch(_UniffiConverterRustBuffer):
+ @classmethod
+ def check_lower(cls, value):
+ for item in value:
+ _UniffiFfiConverterTypeSearchMatch.check_lower(item)
+
+ @classmethod
+ def write(cls, value, buf):
+ items = len(value)
+ buf.write_i32(items)
+ for item in value:
+ _UniffiFfiConverterTypeSearchMatch.write(item, buf)
+
+ @classmethod
+ def read(cls, buf):
+ count = buf.read_i32()
+ if count < 0:
+ raise InternalError("Unexpected negative sequence length")
+
+ return [
+ _UniffiFfiConverterTypeSearchMatch.read(buf) for i in range(count)
+ ]
+
+@dataclass
+class TableSearchResults:
+ def __init__(self, *, table:str, count:int, matches:typing.List[SearchMatch]):
+ self.table = table
+ self.count = count
+ self.matches = matches
+
+
+
+
+ def __str__(self):
+ return "TableSearchResults(table={}, count={}, matches={})".format(self.table, self.count, self.matches)
+ def __eq__(self, other):
+ if self.table != other.table:
+ return False
+ if self.count != other.count:
+ return False
+ if self.matches != other.matches:
+ return False
+ return True
+
+class _UniffiFfiConverterTypeTableSearchResults(_UniffiConverterRustBuffer):
+ @staticmethod
+ def read(buf):
+ return TableSearchResults(
+ table=_UniffiFfiConverterString.read(buf),
+ count=_UniffiFfiConverterUInt32.read(buf),
+ matches=_UniffiFfiConverterSequenceTypeSearchMatch.read(buf),
+ )
+
+ @staticmethod
+ def check_lower(value):
+ _UniffiFfiConverterString.check_lower(value.table)
+ _UniffiFfiConverterUInt32.check_lower(value.count)
+ _UniffiFfiConverterSequenceTypeSearchMatch.check_lower(value.matches)
+
+ @staticmethod
+ def write(value, buf):
+ _UniffiFfiConverterString.write(value.table, buf)
+ _UniffiFfiConverterUInt32.write(value.count, buf)
+ _UniffiFfiConverterSequenceTypeSearchMatch.write(value.matches, buf)
+
+class _UniffiFfiConverterSequenceTypeTableSearchResults(_UniffiConverterRustBuffer):
+ @classmethod
+ def check_lower(cls, value):
+ for item in value:
+ _UniffiFfiConverterTypeTableSearchResults.check_lower(item)
+
+ @classmethod
+ def write(cls, value, buf):
+ items = len(value)
+ buf.write_i32(items)
+ for item in value:
+ _UniffiFfiConverterTypeTableSearchResults.write(item, buf)
+
+ @classmethod
+ def read(cls, buf):
+ count = buf.read_i32()
+ if count < 0:
+ raise InternalError("Unexpected negative sequence length")
+
+ return [
+ _UniffiFfiConverterTypeTableSearchResults.read(buf) for i in range(count)
+ ]
+
+@dataclass
+class SearchResponse:
+ def __init__(self, *, total:int, results:typing.List[TableSearchResults]):
+ self.total = total
+ self.results = results
+
+
+
+
+ def __str__(self):
+ return "SearchResponse(total={}, results={})".format(self.total, self.results)
+ def __eq__(self, other):
+ if self.total != other.total:
+ return False
+ if self.results != other.results:
+ return False
+ return True
+
+class _UniffiFfiConverterTypeSearchResponse(_UniffiConverterRustBuffer):
+ @staticmethod
+ def read(buf):
+ return SearchResponse(
+ total=_UniffiFfiConverterUInt32.read(buf),
+ results=_UniffiFfiConverterSequenceTypeTableSearchResults.read(buf),
+ )
+
+ @staticmethod
+ def check_lower(value):
+ _UniffiFfiConverterUInt32.check_lower(value.total)
+ _UniffiFfiConverterSequenceTypeTableSearchResults.check_lower(value.results)
+
+ @staticmethod
+ def write(value, buf):
+ _UniffiFfiConverterUInt32.write(value.total, buf)
+ _UniffiFfiConverterSequenceTypeTableSearchResults.write(value.results, buf)
+
@dataclass
class Signature:
def __init__(self, *, r:FieldElement, s:FieldElement):
@@ -8224,6 +8521,8 @@ def publish_message(self, message: Message) -> str:
raise NotImplementedError
def publish_message_batch(self, messages: typing.List[Message]) -> typing.List[str]:
raise NotImplementedError
+ def search(self, query: SearchQuery) -> SearchResponse:
+ raise NotImplementedError
def sql(self, query: str) -> typing.List[SqlRow]:
raise NotImplementedError
def starknet_events(self, query: EventQuery) -> PageEvent:
@@ -8469,6 +8768,21 @@ def publish_message_batch(self, messages: typing.List[Message]) -> typing.List[s
*_uniffi_lowered_args,
)
return _uniffi_lift_return(_uniffi_ffi_result)
+ def search(self, query: SearchQuery) -> SearchResponse:
+
+ _UniffiFfiConverterTypeSearchQuery.check_lower(query)
+ _uniffi_lowered_args = (
+ self._uniffi_clone_handle(),
+ _UniffiFfiConverterTypeSearchQuery.lower(query),
+ )
+ _uniffi_lift_return = _UniffiFfiConverterTypeSearchResponse.lift
+ _uniffi_error_converter = _UniffiFfiConverterTypeDojoError
+ _uniffi_ffi_result = _uniffi_rust_call_with_error(
+ _uniffi_error_converter,
+ _UniffiLib.uniffi_dojo_uniffi_fn_method_toriiclient_search,
+ *_uniffi_lowered_args,
+ )
+ return _uniffi_lift_return(_uniffi_ffi_result)
def sql(self, query: str) -> typing.List[SqlRow]:
_UniffiFfiConverterString.check_lower(query)
@@ -8790,6 +9104,11 @@ def write(cls, value: ToriiClient, buf: _UniffiRustBuffer):
"PageTransaction",
"PlayerAchievementQuery",
"Query",
+ "SearchField",
+ "SearchMatch",
+ "SearchQuery",
+ "TableSearchResults",
+ "SearchResponse",
"Signature",
"SqlField",
"SqlRow",
diff --git a/bindings/swift/DojoEngine.swift b/bindings/swift/DojoEngine.swift
index 56a1a5e..f53c70d 100644
--- a/bindings/swift/DojoEngine.swift
+++ b/bindings/swift/DojoEngine.swift
@@ -658,6 +658,8 @@ public protocol ToriiClientProtocol: AnyObject, Sendable {
func publishMessageBatch(messages: [Message]) throws -> [String]
+ func search(query: SearchQuery) throws -> SearchResponse
+
func sql(query: String) throws -> [SqlRow]
func starknetEvents(query: EventQuery) throws -> PageEvent
@@ -848,6 +850,15 @@ open func publishMessageBatch(messages: [Message])throws -> [String] {
})
}
+open func search(query: SearchQuery)throws -> SearchResponse {
+ return try FfiConverterTypeSearchResponse_lift(try rustCallWithError(FfiConverterTypeDojoError_lift) {
+ uniffi_dojo_uniffi_fn_method_toriiclient_search(
+ self.uniffiCloneHandle(),
+ FfiConverterTypeSearchQuery_lower(query),$0
+ )
+})
+}
+
open func sql(query: String)throws -> [SqlRow] {
return try FfiConverterSequenceTypeSqlRow.lift(try rustCallWithError(FfiConverterTypeDojoError_lift) {
uniffi_dojo_uniffi_fn_method_toriiclient_sql(
@@ -3742,6 +3753,218 @@ public func FfiConverterTypeQuery_lower(_ value: Query) -> RustBuffer {
}
+public struct SearchField: Equatable, Hashable {
+ public let key: String
+ public let value: String
+
+ // Default memberwise initializers are never public by default, so we
+ // declare one manually.
+ public init(key: String, value: String) {
+ self.key = key
+ self.value = value
+ }
+
+
+}
+
+#if compiler(>=6)
+extension SearchField: Sendable {}
+#endif
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public struct FfiConverterTypeSearchField: FfiConverterRustBuffer {
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SearchField {
+ return
+ try SearchField(
+ key: FfiConverterString.read(from: &buf),
+ value: FfiConverterString.read(from: &buf)
+ )
+ }
+
+ public static func write(_ value: SearchField, into buf: inout [UInt8]) {
+ FfiConverterString.write(value.key, into: &buf)
+ FfiConverterString.write(value.value, into: &buf)
+ }
+}
+
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchField_lift(_ buf: RustBuffer) throws -> SearchField {
+ return try FfiConverterTypeSearchField.lift(buf)
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchField_lower(_ value: SearchField) -> RustBuffer {
+ return FfiConverterTypeSearchField.lower(value)
+}
+
+
+public struct SearchMatch: Equatable, Hashable {
+ public let id: String
+ public let fields: [SearchField]
+ public let score: Double?
+
+ // Default memberwise initializers are never public by default, so we
+ // declare one manually.
+ public init(id: String, fields: [SearchField], score: Double?) {
+ self.id = id
+ self.fields = fields
+ self.score = score
+ }
+
+
+}
+
+#if compiler(>=6)
+extension SearchMatch: Sendable {}
+#endif
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public struct FfiConverterTypeSearchMatch: FfiConverterRustBuffer {
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SearchMatch {
+ return
+ try SearchMatch(
+ id: FfiConverterString.read(from: &buf),
+ fields: FfiConverterSequenceTypeSearchField.read(from: &buf),
+ score: FfiConverterOptionDouble.read(from: &buf)
+ )
+ }
+
+ public static func write(_ value: SearchMatch, into buf: inout [UInt8]) {
+ FfiConverterString.write(value.id, into: &buf)
+ FfiConverterSequenceTypeSearchField.write(value.fields, into: &buf)
+ FfiConverterOptionDouble.write(value.score, into: &buf)
+ }
+}
+
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchMatch_lift(_ buf: RustBuffer) throws -> SearchMatch {
+ return try FfiConverterTypeSearchMatch.lift(buf)
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchMatch_lower(_ value: SearchMatch) -> RustBuffer {
+ return FfiConverterTypeSearchMatch.lower(value)
+}
+
+
+public struct SearchQuery: Equatable, Hashable {
+ public let query: String
+ public let limit: UInt32
+
+ // Default memberwise initializers are never public by default, so we
+ // declare one manually.
+ public init(query: String, limit: UInt32) {
+ self.query = query
+ self.limit = limit
+ }
+
+
+}
+
+#if compiler(>=6)
+extension SearchQuery: Sendable {}
+#endif
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public struct FfiConverterTypeSearchQuery: FfiConverterRustBuffer {
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SearchQuery {
+ return
+ try SearchQuery(
+ query: FfiConverterString.read(from: &buf),
+ limit: FfiConverterUInt32.read(from: &buf)
+ )
+ }
+
+ public static func write(_ value: SearchQuery, into buf: inout [UInt8]) {
+ FfiConverterString.write(value.query, into: &buf)
+ FfiConverterUInt32.write(value.limit, into: &buf)
+ }
+}
+
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchQuery_lift(_ buf: RustBuffer) throws -> SearchQuery {
+ return try FfiConverterTypeSearchQuery.lift(buf)
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchQuery_lower(_ value: SearchQuery) -> RustBuffer {
+ return FfiConverterTypeSearchQuery.lower(value)
+}
+
+
+public struct SearchResponse: Equatable, Hashable {
+ public let total: UInt32
+ public let results: [TableSearchResults]
+
+ // Default memberwise initializers are never public by default, so we
+ // declare one manually.
+ public init(total: UInt32, results: [TableSearchResults]) {
+ self.total = total
+ self.results = results
+ }
+
+
+}
+
+#if compiler(>=6)
+extension SearchResponse: Sendable {}
+#endif
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public struct FfiConverterTypeSearchResponse: FfiConverterRustBuffer {
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SearchResponse {
+ return
+ try SearchResponse(
+ total: FfiConverterUInt32.read(from: &buf),
+ results: FfiConverterSequenceTypeTableSearchResults.read(from: &buf)
+ )
+ }
+
+ public static func write(_ value: SearchResponse, into buf: inout [UInt8]) {
+ FfiConverterUInt32.write(value.total, into: &buf)
+ FfiConverterSequenceTypeTableSearchResults.write(value.results, into: &buf)
+ }
+}
+
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchResponse_lift(_ buf: RustBuffer) throws -> SearchResponse {
+ return try FfiConverterTypeSearchResponse.lift(buf)
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeSearchResponse_lower(_ value: SearchResponse) -> RustBuffer {
+ return FfiConverterTypeSearchResponse.lower(value)
+}
+
+
public struct Signature: Equatable, Hashable {
public let r: FieldElement
public let s: FieldElement
@@ -3946,6 +4169,62 @@ public func FfiConverterTypeStruct_lower(_ value: Struct) -> RustBuffer {
}
+public struct TableSearchResults: Equatable, Hashable {
+ public let table: String
+ public let count: UInt32
+ public let matches: [SearchMatch]
+
+ // Default memberwise initializers are never public by default, so we
+ // declare one manually.
+ public init(table: String, count: UInt32, matches: [SearchMatch]) {
+ self.table = table
+ self.count = count
+ self.matches = matches
+ }
+
+
+}
+
+#if compiler(>=6)
+extension TableSearchResults: Sendable {}
+#endif
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public struct FfiConverterTypeTableSearchResults: FfiConverterRustBuffer {
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> TableSearchResults {
+ return
+ try TableSearchResults(
+ table: FfiConverterString.read(from: &buf),
+ count: FfiConverterUInt32.read(from: &buf),
+ matches: FfiConverterSequenceTypeSearchMatch.read(from: &buf)
+ )
+ }
+
+ public static func write(_ value: TableSearchResults, into buf: inout [UInt8]) {
+ FfiConverterString.write(value.table, into: &buf)
+ FfiConverterUInt32.write(value.count, into: &buf)
+ FfiConverterSequenceTypeSearchMatch.write(value.matches, into: &buf)
+ }
+}
+
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeTableSearchResults_lift(_ buf: RustBuffer) throws -> TableSearchResults {
+ return try FfiConverterTypeTableSearchResults.lift(buf)
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+public func FfiConverterTypeTableSearchResults_lower(_ value: TableSearchResults) -> RustBuffer {
+ return FfiConverterTypeTableSearchResults.lower(value)
+}
+
+
public struct TaskProgress: Equatable, Hashable {
public let taskId: String
public let count: UInt32
@@ -7041,6 +7320,30 @@ fileprivate struct FfiConverterOptionUInt64: FfiConverterRustBuffer {
}
}
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterOptionDouble: FfiConverterRustBuffer {
+ typealias SwiftType = Double?
+
+ public static func write(_ value: SwiftType, into buf: inout [UInt8]) {
+ guard let value = value else {
+ writeInt(&buf, Int8(0))
+ return
+ }
+ writeInt(&buf, Int8(1))
+ FfiConverterDouble.write(value, into: &buf)
+ }
+
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType {
+ switch try readInt(&buf) as Int8 {
+ case 0: return nil
+ case 1: return try FfiConverterDouble.read(from: &buf)
+ default: throw UniffiInternalError.unexpectedOptionalTag
+ }
+ }
+}
+
#if swift(>=5.8)
@_documentation(visibility: private)
#endif
@@ -7709,6 +8012,56 @@ fileprivate struct FfiConverterSequenceTypePlayerAchievementProgress: FfiConvert
}
}
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterSequenceTypeSearchField: FfiConverterRustBuffer {
+ typealias SwiftType = [SearchField]
+
+ public static func write(_ value: [SearchField], into buf: inout [UInt8]) {
+ let len = Int32(value.count)
+ writeInt(&buf, len)
+ for item in value {
+ FfiConverterTypeSearchField.write(item, into: &buf)
+ }
+ }
+
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [SearchField] {
+ let len: Int32 = try readInt(&buf)
+ var seq = [SearchField]()
+ seq.reserveCapacity(Int(len))
+ for _ in 0 ..< len {
+ seq.append(try FfiConverterTypeSearchField.read(from: &buf))
+ }
+ return seq
+ }
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterSequenceTypeSearchMatch: FfiConverterRustBuffer {
+ typealias SwiftType = [SearchMatch]
+
+ public static func write(_ value: [SearchMatch], into buf: inout [UInt8]) {
+ let len = Int32(value.count)
+ writeInt(&buf, len)
+ for item in value {
+ FfiConverterTypeSearchMatch.write(item, into: &buf)
+ }
+ }
+
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [SearchMatch] {
+ let len: Int32 = try readInt(&buf)
+ var seq = [SearchMatch]()
+ seq.reserveCapacity(Int(len))
+ for _ in 0 ..< len {
+ seq.append(try FfiConverterTypeSearchMatch.read(from: &buf))
+ }
+ return seq
+ }
+}
+
#if swift(>=5.8)
@_documentation(visibility: private)
#endif
@@ -7784,6 +8137,31 @@ fileprivate struct FfiConverterSequenceTypeStruct: FfiConverterRustBuffer {
}
}
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterSequenceTypeTableSearchResults: FfiConverterRustBuffer {
+ typealias SwiftType = [TableSearchResults]
+
+ public static func write(_ value: [TableSearchResults], into buf: inout [UInt8]) {
+ let len = Int32(value.count)
+ writeInt(&buf, len)
+ for item in value {
+ FfiConverterTypeTableSearchResults.write(item, into: &buf)
+ }
+ }
+
+ public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [TableSearchResults] {
+ let len: Int32 = try readInt(&buf)
+ var seq = [TableSearchResults]()
+ seq.reserveCapacity(Int(len))
+ for _ in 0 ..< len {
+ seq.append(try FfiConverterTypeTableSearchResults.read(from: &buf))
+ }
+ return seq
+ }
+}
+
#if swift(>=5.8)
@_documentation(visibility: private)
#endif
@@ -8295,6 +8673,9 @@ private let initializationResult: InitializationResult = {
if (uniffi_dojo_uniffi_checksum_method_toriiclient_publish_message_batch() != 2146) {
return InitializationResult.apiChecksumMismatch
}
+ if (uniffi_dojo_uniffi_checksum_method_toriiclient_search() != 20622) {
+ return InitializationResult.apiChecksumMismatch
+ }
if (uniffi_dojo_uniffi_checksum_method_toriiclient_sql() != 38286) {
return InitializationResult.apiChecksumMismatch
}
diff --git a/crates/c/src/lib.rs b/crates/c/src/lib.rs
index ce54217..d476450 100644
--- a/crates/c/src/lib.rs
+++ b/crates/c/src/lib.rs
@@ -1893,6 +1893,28 @@ mod ffi {
}
}
+ /// Performs a full-text search across indexed entities using FTS5
+ ///
+ /// # Parameters
+ /// * `client` - Pointer to ToriiClient instance
+ /// * `query` - Search query containing the search text and limit
+ ///
+ /// # Returns
+ /// Result containing SearchResponse with results grouped by table or error
+ #[no_mangle]
+ pub unsafe extern "C" fn client_search(
+ client: *mut ToriiClient,
+ query: types::SearchQuery,
+) -> Result<SearchResponse> {
+ let query = query.into();
+ let search_future = unsafe { (*client).inner.search(query) };
+
+ match RUNTIME.block_on(search_future) {
+ Ok(response) => Result::Ok(response.into()),
+ Err(e) => Result::Err(e.into()),
+ }
+ }
+
/// Serializes a string into a byte array
///
/// # Parameters
diff --git a/crates/c/src/types.rs b/crates/c/src/types.rs
index b12bea9..c1ff062 100644
--- a/crates/c/src/types.rs
+++ b/crates/c/src/types.rs
@@ -2094,6 +2094,85 @@ impl From for AchievementProgression {
}
}
+// Search types
+#[derive(Clone, Debug)]
+#[repr(C)]
+pub struct SearchQuery {
+ pub query: *const c_char,
+ pub limit: u32,
+}
+
+impl From<SearchQuery> for torii_proto::SearchQuery {
+ fn from(val: SearchQuery) -> Self {
+ let query = unsafe { CStr::from_ptr(val.query).to_string_lossy().to_string() };
+ Self { query, limit: val.limit }
+ }
+}
+
+#[derive(Clone, Debug)]
+#[repr(C)]
+pub struct SearchMatchField {
+ pub key: *const c_char,
+ pub value: *const c_char,
+}
+
+#[derive(Clone, Debug)]
+#[repr(C)]
+pub struct SearchMatch {
+ pub id: *const c_char,
+ pub fields: CArray<SearchMatchField>,
+ pub score: COption<f64>,
+}
+
+impl From<torii_proto::SearchMatch> for SearchMatch {
+ fn from(val: torii_proto::SearchMatch) -> Self {
+ let fields: Vec<SearchMatchField> = val
+ .fields
+ .into_iter()
+ .map(|(k, v)| SearchMatchField {
+ key: CString::new(k).unwrap().into_raw(),
+ value: CString::new(v).unwrap().into_raw(),
+ })
+ .collect();
+ Self {
+ id: CString::new(val.id).unwrap().into_raw(),
+ fields: fields.into(),
+ score: val.score.into(),
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+#[repr(C)]
+pub struct TableSearchResults {
+ pub table: *const c_char,
+ pub count: u32,
+ pub matches: CArray<SearchMatch>,
+}
+
+impl From<torii_proto::TableSearchResults> for TableSearchResults {
+ fn from(val: torii_proto::TableSearchResults) -> Self {
+ Self {
+ table: CString::new(val.table).unwrap().into_raw(),
+ count: val.count,
+ matches: val.matches.into(),
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+#[repr(C)]
+pub struct SearchResponse {
+ pub total: u32,
+ pub results: CArray<TableSearchResults>,
+}
+
+impl From<torii_proto::SearchResponse> for SearchResponse {
+ fn from(val: torii_proto::SearchResponse) -> Self {
+ Self { total: val.total, results: val.results.into() }
+ }
+}
+
// C-specific types for accounts and providers
pub struct Provider(
pub(crate) std::sync::Arc<
diff --git a/crates/uniffi/src/dojo.udl b/crates/uniffi/src/dojo.udl
index 3d6a654..69a9e57 100644
--- a/crates/uniffi/src/dojo.udl
+++ b/crates/uniffi/src/dojo.udl
@@ -637,6 +637,34 @@ dictionary PageEvent {
string? next_cursor;
};
+// Search types
+dictionary SearchQuery {
+ string query;
+ u32 limit;
+};
+
+dictionary SearchField {
+ string key;
+ string value;
+};
+
+dictionary SearchMatch {
+ string id;
+ sequence<SearchField> fields;
+ f64? score;
+};
+
+dictionary TableSearchResults {
+ string table;
+ u32 count;
+ sequence<SearchMatch> matches;
+};
+
+dictionary SearchResponse {
+ u32 total;
+ sequence<TableSearchResults> results;
+};
+
// Main Dojo client interface
interface ToriiClient {
// Constructor - create a new client with default config (4MB max message size)
@@ -719,6 +747,10 @@ interface ToriiClient {
[Throws=DojoError]
sequence<SqlRow> sql(string query);
+ // Perform full-text search across indexed entities
+ [Throws=DojoError]
+ SearchResponse search(SearchQuery query);
+
// Subscription methods
// Subscribe to entity updates
[Throws=DojoError]
diff --git a/crates/uniffi/src/uniffi/client.rs b/crates/uniffi/src/uniffi/client.rs
index afd17ca..35b06ff 100644
--- a/crates/uniffi/src/uniffi/client.rs
+++ b/crates/uniffi/src/uniffi/client.rs
@@ -318,6 +318,21 @@ impl ToriiClient {
rows.into_iter().map(|r| r.try_into()).collect()
}
+ /// Perform a full-text search across indexed entities using FTS5.
+ ///
+ /// # Arguments
+ /// * `query` - Search query containing the search text and limit
+ ///
+ /// # Returns
+ /// A `SearchResponse` containing results grouped by table with relevance scores
+ pub fn search(&self, query: SearchQuery) -> Result<SearchResponse, DojoError> {
+ let inner = self.inner.clone();
+ runtime()
+ .block_on(inner.search(query.into()))
+ .map(Into::into)
+ .map_err(|e| DojoError::QueryError { message: e.to_string() })
+ }
+
/// Subscribe to entity updates
pub fn subscribe_entity_updates(
&self,
diff --git a/crates/uniffi/src/uniffi/types/mod.rs b/crates/uniffi/src/uniffi/types/mod.rs
index 3352270..b5b0238 100644
--- a/crates/uniffi/src/uniffi/types/mod.rs
+++ b/crates/uniffi/src/uniffi/types/mod.rs
@@ -11,6 +11,7 @@ pub mod entity;
pub mod event;
pub mod query;
pub mod schema;
+pub mod search;
pub mod token;
pub mod transaction;
@@ -26,5 +27,6 @@ pub use entity::*;
pub use event::*;
pub use query::*;
pub use schema::*;
+pub use search::*;
pub use token::*;
pub use transaction::*;
diff --git a/crates/uniffi/src/uniffi/types/search.rs b/crates/uniffi/src/uniffi/types/search.rs
new file mode 100644
index 0000000..5e79b37
--- /dev/null
+++ b/crates/uniffi/src/uniffi/types/search.rs
@@ -0,0 +1,63 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct SearchQuery {
+ pub query: String,
+ pub limit: u32,
+}
+
+impl From<SearchQuery> for torii_proto::SearchQuery {
+ fn from(val: SearchQuery) -> Self {
+ Self { query: val.query, limit: val.limit }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct SearchField {
+ pub key: String,
+ pub value: String,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct SearchMatch {
+ pub id: String,
+ pub fields: Vec<SearchField>,
+ pub score: Option<f64>,
+}
+
+impl From<torii_proto::SearchMatch> for SearchMatch {
+ fn from(val: torii_proto::SearchMatch) -> Self {
+ let fields =
+ val.fields.into_iter().map(|(key, value)| SearchField { key, value }).collect();
+ Self { id: val.id, fields, score: val.score }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct TableSearchResults {
+ pub table: String,
+ pub count: u32,
+ pub matches: Vec<SearchMatch>,
+}
+
+impl From<torii_proto::TableSearchResults> for TableSearchResults {
+ fn from(val: torii_proto::TableSearchResults) -> Self {
+ Self {
+ table: val.table,
+ count: val.count,
+ matches: val.matches.into_iter().map(Into::into).collect(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct SearchResponse {
+ pub total: u32,
+ pub results: Vec<TableSearchResults>,
+}
+
+impl From<torii_proto::SearchResponse> for SearchResponse {
+ fn from(val: torii_proto::SearchResponse) -> Self {
+ Self { total: val.total, results: val.results.into_iter().map(Into::into).collect() }
+ }
+}
diff --git a/crates/wasm/src/lib.rs b/crates/wasm/src/lib.rs
index d401a76..e7af454 100644
--- a/crates/wasm/src/lib.rs
+++ b/crates/wasm/src/lib.rs
@@ -34,10 +34,10 @@ use types::{
ActivityQuery, AggregationEntry, AggregationQuery, Aggregations, BlockId, Call, Calls, Clause,
ClientConfig, Contract, ContractQuery, Contracts, ControllerQuery, Controllers, Entities,
Entity, KeysClauses, Message, PlayerAchievementQuery, PlayerAchievements, Provider, Query,
- Signature, Subscription, Token, TokenBalance, TokenBalanceQuery, TokenBalances,
- TokenContractQuery, TokenContracts, TokenQuery, TokenTransfer, TokenTransferQuery,
- TokenTransfers, Tokens, ToriiClient, Transaction, TransactionFilter, TransactionQuery,
- Transactions, WasmU256,
+ SearchQuery, SearchResponse, Signature, Subscription, Token, TokenBalance, TokenBalanceQuery,
+ TokenBalances, TokenContractQuery, TokenContracts, TokenQuery, TokenTransfer,
+ TokenTransferQuery, TokenTransfers, Tokens, ToriiClient, Transaction, TransactionFilter,
+ TransactionQuery, Transactions, WasmU256,
};
const JSON_COMPAT_SERIALIZER: serde_wasm_bindgen::Serializer =
@@ -2163,6 +2163,36 @@ impl ToriiClient {
.map_err(|err| JsValue::from(format!("failed to update subscription: {err}")))
}
+ /// Perform a full-text search across indexed entities using FTS5.
+ ///
+ /// # Parameters
+ /// * `query` - Search query containing the search text and limit
+ ///
+ /// # Returns
+ /// A `SearchResponse` containing results grouped by table with relevance scores
+ ///
+ /// # Example
+ /// ```javascript
+ /// const results = await client.search({ query: "dragon", limit: 10 });
+ /// console.log(`Found ${results.total} total matches`);
+ /// for (const tableResults of results.results) {
+ /// console.log(`Table ${tableResults.table}: ${tableResults.count} matches`);
+ /// for (const match of tableResults.matches) {
+ /// console.log(` ID: ${match.id}, Score: ${match.score}`);
+ /// for (const [field, value] of Object.entries(match.fields)) {
+ /// console.log(` ${field}: ${value}`);
+ /// }
+ /// }
+ /// }
+ /// ```
+ #[wasm_bindgen(js_name = search)]
+ pub async fn search(&self, query: SearchQuery) -> Result<SearchResponse, JsValue> {
+ let response =
+ self.inner.search(query.into()).await.map_err(|err| JsValue::from(err.to_string()))?;
+
+ Ok(response.into())
+ }
+
/// Publishes a message to the network
///
/// # Parameters
diff --git a/crates/wasm/src/types.rs b/crates/wasm/src/types.rs
index eaef1ac..5aefe76 100644
--- a/crates/wasm/src/types.rs
+++ b/crates/wasm/src/types.rs
@@ -1455,6 +1455,65 @@ pub struct Achievements(pub Page);
#[tsify(into_wasm_abi, from_wasm_abi)]
pub struct PlayerAchievements(pub Page);
+// Search types
+#[derive(Tsify, Serialize, Deserialize, Debug)]
+#[tsify(into_wasm_abi, from_wasm_abi)]
+pub struct SearchQuery {
+ pub query: String,
+ pub limit: u32,
+}
+
+impl From<SearchQuery> for torii_proto::SearchQuery {
+ fn from(value: SearchQuery) -> Self {
+ Self { query: value.query, limit: value.limit }
+ }
+}
+
+#[derive(Tsify, Serialize, Deserialize, Debug)]
+#[tsify(into_wasm_abi, from_wasm_abi, hashmap_as_object)]
+pub struct SearchMatch {
+ pub id: String,
+ pub fields: HashMap<String, String>,
+ pub score: Option<f64>,
+}
+
+impl From<torii_proto::SearchMatch> for SearchMatch {
+ fn from(value: torii_proto::SearchMatch) -> Self {
+ Self { id: value.id, fields: value.fields, score: value.score }
+ }
+}
+
+#[derive(Tsify, Serialize, Deserialize, Debug)]
+#[tsify(into_wasm_abi, from_wasm_abi)]
+pub struct TableSearchResults {
+ pub table: String,
+ pub count: u32,
+ pub matches: Vec<SearchMatch>,
+}
+
+impl From<torii_proto::TableSearchResults> for TableSearchResults {
+ fn from(value: torii_proto::TableSearchResults) -> Self {
+ Self {
+ table: value.table,
+ count: value.count,
+ matches: value.matches.into_iter().map(Into::into).collect(),
+ }
+ }
+}
+
+#[derive(Tsify, Serialize, Deserialize, Debug)]
+#[tsify(into_wasm_abi, from_wasm_abi)]
+pub struct SearchResponse {
+ pub total: u32,
+ pub results: Vec<TableSearchResults>,
+}
+
+impl From<torii_proto::SearchResponse> for SearchResponse {
+ fn from(value: torii_proto::SearchResponse) -> Self {
+ Self { total: value.total, results: value.results.into_iter().map(Into::into).collect() }
+ }
+}
+
// WASM-specific client types
#[wasm_bindgen]
pub struct ToriiClient {
diff --git a/scripts/build_everything.sh b/scripts/build_everything.sh
new file mode 100755
index 0000000..0e987e1
--- /dev/null
+++ b/scripts/build_everything.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+# Master script to build ALL Dojo bindings (UniFFI, C, and WASM)
+
+set -e
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+cd "$SCRIPT_DIR/.."
+
+echo "========================================"
+echo "Building ALL Dojo Bindings"
+echo "========================================"
+echo ""
+
+# Build UniFFI bindings (Swift, Kotlin, Python, C#, Go)
+echo "🔧 Building UniFFI bindings..."
+./scripts/build_all_bindings.sh
+echo ""
+
+# Build C bindings
+echo "🔧 Building C bindings..."
+./scripts/build_c.sh
+echo ""
+
+# Build C++ bindings
+echo "🔧 Building C++ bindings..."
+./scripts/build_cpp.sh
+echo ""
+
+# Build WASM bindings
+echo "🔧 Building WASM bindings..."
+./scripts/build_wasm.sh
+echo ""
+
+echo "✅ ALL BINDINGS GENERATED SUCCESSFULLY!"
+echo ""
+echo "Generated bindings:"
+echo " 📁 UniFFI bindings:"
+echo " - bindings/swift/"
+echo " - bindings/kotlin/"
+echo " - bindings/python/"
+echo " - bindings/csharp/"
+echo " - bindings/go/"
+echo " 📁 C/C++ bindings:"
+echo " - bindings/c/dojo.h"
+echo " - bindings/cpp/"
+echo " 📁 WASM bindings:"
+echo " - pkg/"
+echo ""
+
diff --git a/src/bin/uniffi-bindgen-kotlin.rs b/src/bin/uniffi-bindgen-kotlin.rs
deleted file mode 100644
index f9a2f4d..0000000
--- a/src/bin/uniffi-bindgen-kotlin.rs
+++ /dev/null
@@ -1,55 +0,0 @@
-use std::env;
-use std::process;
-use uniffi_bindgen::bindings::KotlinBindingGenerator;
-use uniffi_bindgen::library_mode::generate_bindings;
-use camino::Utf8PathBuf;
-
-fn main() {
- let args: Vec<String> = env::args().collect();
-
- if args.len() < 3 {
- eprintln!("UniFFI Kotlin Binding Generator");
- eprintln!();
- eprintln!("Usage: {} ", args[0]);
- eprintln!();
- eprintln!("Example:");
- eprintln!(" {} target/release/libdojo_c.dylib bindings/kotlin", args[0]);
- eprintln!();
- process::exit(1);
- }
-
- let library_path = Utf8PathBuf::from(&args[1]);
- let out_dir = Utf8PathBuf::from(&args[2]);
-
- if !library_path.exists() {
- eprintln!("Error: Library file not found: {}", library_path);
- eprintln!("Build the library first with: cargo build --release");
- process::exit(1);
- }
-
- println!("Generating Kotlin bindings...");
- println!("Library: {}", library_path);
- println!("Output: {}", out_dir);
-
- // Use library mode with Kotlin binding generator
- let config_supplier = uniffi_bindgen::EmptyCrateConfigSupplier;
-
- match generate_bindings(
- &library_path,
- None, // crate_name (auto-detect)
- &KotlinBindingGenerator,
- &config_supplier,
- None, // config_file_override
- &out_dir,
- false, // try_format_code
- ) {
- Ok(_) => {
- println!("✓ Kotlin bindings generated successfully in {}", out_dir);
- }
- Err(e) => {
- eprintln!("Error generating bindings: {}", e);
- process::exit(1);
- }
- }
-}
-
diff --git a/src/bin/uniffi-bindgen-python.rs b/src/bin/uniffi-bindgen-python.rs
deleted file mode 100644
index e4f37bb..0000000
--- a/src/bin/uniffi-bindgen-python.rs
+++ /dev/null
@@ -1,65 +0,0 @@
-use std::env;
-use std::process;
-use uniffi_bindgen::bindings::python::run_pipeline;
-use uniffi_bindgen::cargo_metadata::CrateConfigSupplier;
-use uniffi_bindgen::pipeline::initial::Root;
-use camino::Utf8PathBuf;
-
-fn main() {
- let args: Vec<String> = env::args().collect();
-
- if args.len() < 3 {
- eprintln!("UniFFI Python Binding Generator");
- eprintln!();
- eprintln!("Usage: {} ", args[0]);
- eprintln!();
- eprintln!("Example:");
- eprintln!(" {} target/release/libdojo_c.dylib bindings/python", args[0]);
- eprintln!();
- process::exit(1);
- }
-
- let library_path = Utf8PathBuf::from(&args[1]);
- let out_dir = Utf8PathBuf::from(&args[2]);
-
- if !library_path.exists() {
- eprintln!("Error: Library file not found: {}", library_path);
- eprintln!("Build the library first with: cargo build --release");
- process::exit(1);
- }
-
- println!("Generating Python bindings...");
- println!("Library: {}", library_path);
- println!("Output: {}", out_dir);
-
- // Use cargo metadata to get crate configuration
- let metadata = match cargo_metadata::MetadataCommand::new().exec() {
- Ok(m) => m,
- Err(e) => {
- eprintln!("Error getting cargo metadata: {}", e);
- eprintln!("Make sure you're running this from a cargo project directory");
- process::exit(1);
- }
- };
-
- let config_supplier = CrateConfigSupplier::from(metadata);
-
- match Root::from_library(config_supplier, &library_path, None) {
- Ok(root) => {
- match run_pipeline(root, &out_dir) {
- Ok(_) => {
- println!("✓ Python bindings generated successfully in {}", out_dir);
- }
- Err(e) => {
- eprintln!("Error generating Python bindings: {}", e);
- process::exit(1);
- }
- }
- }
- Err(e) => {
- eprintln!("Error loading library metadata: {}", e);
- process::exit(1);
- }
- }
-}
-
diff --git a/src/bin/uniffi-bindgen-swift.rs b/src/bin/uniffi-bindgen-swift.rs
deleted file mode 100644
index 5396425..0000000
--- a/src/bin/uniffi-bindgen-swift.rs
+++ /dev/null
@@ -1,71 +0,0 @@
-use std::env;
-use std::process;
-use uniffi_bindgen::bindings::{generate_swift_bindings, SwiftBindingsOptions};
-use camino::Utf8PathBuf;
-
-fn main() {
- let args: Vec<String> = env::args().collect();
-
- if args.len() < 3 {
- eprintln!("UniFFI Swift Binding Generator");
- eprintln!();
- eprintln!("Usage: {} [--swift-sources] [--headers] [--modulemap]", args[0]);
- eprintln!();
- eprintln!("Options:");
- eprintln!(" --swift-sources Generate .swift source files (default)");
- eprintln!(" --headers Generate .h header files");
- eprintln!(" --modulemap Generate modulemap");
- eprintln!(" --xcframework Generate XCFramework-compatible modulemap");
- eprintln!();
- eprintln!("Examples:");
- eprintln!(" {} target/release/libdojo_c.dylib bindings/swift --swift-sources", args[0]);
- eprintln!(" {} target/release/libdojo_c.dylib bindings/swift --headers --modulemap", args[0]);
- eprintln!();
- process::exit(1);
- }
-
- let library_path = Utf8PathBuf::from(&args[1]);
- let out_dir = Utf8PathBuf::from(&args[2]);
-
- if !library_path.exists() {
- eprintln!("Error: Library file not found: {}", library_path);
- eprintln!("Build the library first with: cargo build --release");
- process::exit(1);
- }
-
- // Parse options
- let has_swift_sources = args.contains(&"--swift-sources".to_string());
- let has_headers = args.contains(&"--headers".to_string());
- let has_modulemap = args.contains(&"--modulemap".to_string());
- let has_xcframework = args.contains(&"--xcframework".to_string());
-
- // Default to generating Swift sources if no specific flags are provided
- let generate_swift_sources = has_swift_sources || (!has_headers && !has_modulemap);
-
- println!("Generating Swift bindings...");
- println!("Library: {}", library_path);
- println!("Output: {}", out_dir);
-
- let options = SwiftBindingsOptions {
- generate_swift_sources,
- generate_headers: has_headers,
- generate_modulemap: has_modulemap,
- source: library_path,
- out_dir,
- xcframework: has_xcframework,
- module_name: Some("DojoEngine".to_string()),
- modulemap_filename: None,
- metadata_no_deps: false,
- link_frameworks: vec![],
- };
-
- match generate_swift_bindings(options) {
- Ok(_) => {
- println!("✓ Swift bindings generated successfully!");
- }
- Err(e) => {
- eprintln!("Error generating bindings: {}", e);
- process::exit(1);
- }
- }
-}
diff --git a/src/dojo.udl b/src/dojo.udl
deleted file mode 100644
index 9f3070f..0000000
--- a/src/dojo.udl
+++ /dev/null
@@ -1,746 +0,0 @@
-// Simplified UDL - just the types that work easily with UniFFI
-// Complex recursive types (Primitive, Ty, Clause, etc.) should be handled
-// via procmacros in Rust rather than UDL
-
-namespace dojo {
-};
-
-// Core types
-[Custom]
-typedef string FieldElement;
-
-[Custom]
-typedef string U256;
-
-// Enums
-enum PaginationDirection {
- "Forward",
- "Backward",
-};
-
-enum OrderDirection {
- "Asc",
- "Desc",
-};
-
-enum ContractType {
- "WORLD",
- "ERC20",
- "ERC721",
- "ERC1155",
- "UDC",
- "OTHER",
-};
-
-enum CallType {
- "Execute",
- "ExecuteFromOutside",
-};
-
-enum PatternMatching {
- "FixedLen",
- "VariableLen",
-};
-
-enum LogicalOperator {
- "And",
- "Or",
-};
-
-enum ComparisonOperator {
- "Eq",
- "Neq",
- "Gt",
- "Gte",
- "Lt",
- "Lte",
- "In",
- "NotIn",
- "Contains",
- "ContainsAll",
- "ContainsAny",
- "ArrayLengthEq",
- "ArrayLengthGt",
- "ArrayLengthLt",
-};
-
-// Basic structures
-dictionary Signature {
- FieldElement r;
- FieldElement s;
-};
-
-dictionary OrderBy {
- string field;
- OrderDirection direction;
-};
-
-dictionary Pagination {
- string? cursor;
- u32? limit;
- PaginationDirection direction;
- sequence order_by;
-};
-
-// Controller
-dictionary Controller {
- FieldElement address;
- string username;
- u64 deployed_at_timestamp;
-};
-
-dictionary ControllerQuery {
- Pagination pagination;
- sequence contract_addresses;
- sequence usernames;
-};
-
-// Token types
-dictionary Token {
- FieldElement contract_address;
- U256? token_id;
- string name;
- string symbol;
- u8 decimals;
- string metadata;
- U256? total_supply;
-};
-
-dictionary TokenBalance {
- U256 balance;
- FieldElement account_address;
- FieldElement contract_address;
- U256? token_id;
-};
-
-dictionary TokenContract {
- FieldElement contract_address;
- string name;
- string symbol;
- u8 decimals;
- string metadata;
- string token_metadata;
- U256? total_supply;
-};
-
-dictionary AttributeFilter {
- string trait_name;
- string trait_value;
-};
-
-dictionary TokenQuery {
- sequence contract_addresses;
- sequence token_ids;
- sequence attribute_filters;
- Pagination pagination;
-};
-
-dictionary TokenBalanceQuery {
- sequence contract_addresses;
- sequence account_addresses;
- sequence token_ids;
- Pagination pagination;
-};
-
-dictionary TokenContractQuery {
- sequence contract_addresses;
- sequence contract_types;
- Pagination pagination;
-};
-
-dictionary TokenTransfer {
- string id;
- FieldElement contract_address;
- FieldElement from_address;
- FieldElement to_address;
- U256 amount;
- U256? token_id;
- u64 executed_at;
- string? event_id;
-};
-
-dictionary TokenTransferQuery {
- sequence contract_addresses;
- sequence account_addresses;
- sequence token_ids;
- Pagination pagination;
-};
-
-// Contract
-dictionary Contract {
- FieldElement contract_address;
- ContractType contract_type;
- u64? head;
- u64? tps;
- u64? last_block_timestamp;
- FieldElement? last_pending_block_tx;
- u64 updated_at;
- u64 created_at;
-};
-
-dictionary ContractQuery {
- sequence contract_addresses;
- sequence contract_types;
-};
-
-// Transaction types
-dictionary TransactionCall {
- FieldElement contract_address;
- string entrypoint;
- sequence calldata;
- CallType call_type;
- FieldElement caller_address;
-};
-
-dictionary Transaction {
- FieldElement transaction_hash;
- FieldElement sender_address;
- sequence calldata;
- FieldElement max_fee;
- sequence signature;
- FieldElement nonce;
- u64 block_number;
- string transaction_type;
- u64 block_timestamp;
- sequence calls;
- sequence unique_models;
-};
-
-dictionary TransactionFilter {
- sequence transaction_hashes;
- sequence caller_addresses;
- sequence contract_addresses;
- sequence entrypoints;
- sequence model_selectors;
- u64? from_block;
- u64? to_block;
-};
-
-dictionary TransactionQuery {
- TransactionFilter? filter;
- Pagination pagination;
-};
-
-// Aggregation
-dictionary AggregationQuery {
- sequence aggregator_ids;
- sequence entity_ids;
- Pagination pagination;
-};
-
-dictionary AggregationEntry {
- string id;
- string aggregator_id;
- string entity_id;
- U256 value;
- string display_value;
- u64 position;
- string model_id;
- u64 created_at;
- u64 updated_at;
-};
-
-// Activity
-dictionary ActionCount {
- string action_name;
- u32 count;
-};
-
-dictionary Activity {
- string id;
- FieldElement world_address;
- string namespace;
- FieldElement caller_address;
- u64 session_start;
- u64 session_end;
- u32 action_count;
- sequence actions;
- u64 updated_at;
-};
-
-dictionary ActivityQuery {
- sequence world_addresses;
- sequence namespaces;
- sequence caller_addresses;
- u64? from_time;
- u64? to_time;
- Pagination pagination;
-};
-
-// Achievement
-dictionary AchievementTask {
- string task_id;
- string description;
- u32 total;
- u32 total_completions;
- f64 completion_rate;
- u64 created_at;
-};
-
-dictionary Achievement {
- string id;
- FieldElement world_address;
- string namespace;
- string entity_id;
- boolean hidden;
- u32 index;
- u32 points;
- string start;
- string end;
- string group;
- string icon;
- string title;
- string description;
- sequence tasks;
- string? data;
- u32 total_completions;
- f64 completion_rate;
- u64 created_at;
- u64 updated_at;
-};
-
-dictionary AchievementQuery {
- sequence world_addresses;
- sequence namespaces;
- boolean? hidden;
- Pagination pagination;
-};
-
-dictionary TaskProgress {
- string task_id;
- u32 count;
- boolean completed;
-};
-
-dictionary PlayerAchievementProgress {
- Achievement achievement;
- sequence task_progress;
- boolean completed;
- f64 progress_percentage;
-};
-
-dictionary PlayerAchievementStats {
- u32 total_points;
- u32 completed_achievements;
- u32 total_achievements;
- f64 completion_percentage;
- u64? last_achievement_at;
- u64 created_at;
- u64 updated_at;
-};
-
-dictionary PlayerAchievementEntry {
- FieldElement player_address;
- PlayerAchievementStats stats;
- sequence achievements;
-};
-
-dictionary PlayerAchievementQuery {
- sequence world_addresses;
- sequence namespaces;
- sequence player_addresses;
- Pagination pagination;
-};
-
-// General entity query
-dictionary Query {
- sequence world_addresses;
- Pagination pagination;
- Clause? clause;
- boolean no_hashed_keys;
- sequence models;
- boolean historical;
-};
-
-// Event query
-dictionary EventQuery {
- KeysClause? keys;
- Pagination pagination;
-};
-
-// SQL query result types
-dictionary SqlField {
- string name;
- SqlValue value;
-};
-
-dictionary SqlRow {
- sequence fields;
-};
-
-[Enum]
-interface SqlValue {
- Text(string value);
- Integer(i64 value);
- Real(f64 value);
- Blob(sequence value);
- Null();
-};
-
-dictionary AchievementProgression {
- string id;
- string achievement_id;
- string task_id;
- FieldElement world_address;
- string namespace;
- FieldElement player_id;
- u32 count;
- boolean completed;
- u64? completed_at;
- u64 created_at;
- u64 updated_at;
-};
-
-// Schema types - Complex enums with associated data
-
-[Enum]
-interface Primitive {
- I8(i8 value);
- I16(i16 value);
- I32(i32 value);
- I64(i64 value);
- I128(sequence value);
- U8(u8 value);
- U16(u16 value);
- U32(u32 value);
- U64(u64 value);
- U128(sequence value);
- U256(U256 value);
- Bool(boolean value);
- Felt252(FieldElement value);
- ClassHash(FieldElement value);
- ContractAddress(FieldElement value);
- EthAddress(FieldElement value);
-};
-
-[Enum]
-interface MemberValue {
- Primitive(Primitive value);
- String(string value);
- List(sequence values);
-};
-
-dictionary Member {
- string name;
- Ty ty;
- boolean key;
-};
-
-dictionary Struct {
- string name;
- sequence children;
-};
-
-dictionary EnumOption {
- string name;
- Ty ty;
-};
-
-dictionary EnumType {
- string name;
- u8 option;
- sequence options;
-};
-
-dictionary FixedSizeArray {
- sequence array;
- u32 size;
-};
-
-[Enum]
-interface Ty {
- Primitive(Primitive value);
- Struct(Struct value);
- Enum(EnumType value);
- Tuple(sequence values);
- Array(sequence values);
- FixedSizeArray(FixedSizeArray value);
- ByteArray(string value);
-};
-
-[Enum]
-interface ValueType {
- String(string value);
- Int(i64 value);
- UInt(u64 value);
- Bool(boolean value);
- Bytes(sequence value);
-};
-
-// Query types - Complex enums for query building
-
-dictionary KeysClause {
- sequence keys;
- PatternMatching pattern_matching;
- sequence models;
-};
-
-dictionary MemberClause {
- string model;
- string member;
- ComparisonOperator operator;
- MemberValue value;
-};
-
-dictionary CompositeClause {
- LogicalOperator operator;
- sequence clauses;
-};
-
-[Enum]
-interface Clause {
- HashedKeys(sequence keys);
- Keys(KeysClause clause);
- Member(MemberClause clause);
- Composite(CompositeClause clause);
-};
-
-// Event and Message types
-dictionary Event {
- sequence keys;
- sequence data;
- FieldElement transaction_hash;
-};
-
-dictionary Message {
- string message;
- sequence signature;
- FieldElement world_address;
-};
-
-// Entity, Model, and World types
-dictionary Entity {
- FieldElement world_address;
- FieldElement hashed_keys;
- sequence models;
- u64 created_at;
- u64 updated_at;
- u64 executed_at;
-};
-
-dictionary Model {
- FieldElement world_address;
- Ty schema;
- string namespace;
- string name;
- FieldElement selector;
- u32 packed_size;
- u32 unpacked_size;
- FieldElement class_hash;
- FieldElement contract_address;
- string layout;
- boolean use_legacy_store;
-};
-
-dictionary World {
- FieldElement world_address;
- sequence models;
-};
-
-// Error type
-[Error]
-enum DojoError {
- "ClientError",
- "SerializationError",
- "NetworkError",
- "InvalidInput",
- "ConnectionError",
- "PublishError",
- "QueryError",
- "SubscriptionError",
-};
-
-// Callback interfaces for subscriptions
-callback interface EntityUpdateCallback {
- void on_update(Entity entity);
- void on_error(string error);
-};
-
-callback interface TokenBalanceUpdateCallback {
- void on_update(TokenBalance balance);
- void on_error(string error);
-};
-
-callback interface TokenUpdateCallback {
- void on_update(Token token);
- void on_error(string error);
-};
-
-callback interface TransactionUpdateCallback {
- void on_update(Transaction transaction);
- void on_error(string error);
-};
-
-callback interface EventUpdateCallback {
- void on_update(Event event);
- void on_error(string error);
-};
-
-// Pagination wrapper types for different result types
-dictionary PageController {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageToken {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageTokenBalance {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageTokenContract {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageTokenTransfer {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageTransaction {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageAggregationEntry {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageActivity {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageAchievement {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PagePlayerAchievement {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageEntity {
- sequence items;
- string? next_cursor;
-};
-
-dictionary PageEvent {
- sequence items;
- string? next_cursor;
-};
-
-// Main Dojo client interface
-interface ToriiClient {
- // Constructor - create a new client with default config (4MB max message size)
- [Async, Throws=DojoError]
- constructor(string torii_url);
-
- // Constructor - create a new client with custom max message size
- [Async, Throws=DojoError, Name=new_with_config]
- constructor(string torii_url, u64 max_message_size);
-
- // Publish offchain message
- [Async, Throws=DojoError]
- string publish_message(Message message);
-
- // Publish multiple offchain messages
- [Async, Throws=DojoError]
- sequence publish_message_batch(sequence messages);
-
- // Get world metadata
- [Async, Throws=DojoError]
- sequence worlds(sequence world_addresses);
-
- // Query controllers
- [Async, Throws=DojoError]
- PageController controllers(ControllerQuery query);
-
- // Query contracts
- [Async, Throws=DojoError]
- sequence contracts(ContractQuery query);
-
- // Query tokens
- [Async, Throws=DojoError]
- PageToken tokens(TokenQuery query);
-
- // Query token balances
- [Async, Throws=DojoError]
- PageTokenBalance token_balances(TokenBalanceQuery query);
-
- // Query token contracts
- [Async, Throws=DojoError]
- PageTokenContract token_contracts(TokenContractQuery query);
-
- // Query token transfers
- [Async, Throws=DojoError]
- PageTokenTransfer token_transfers(TokenTransferQuery query);
-
- // Query transactions
- [Async, Throws=DojoError]
- PageTransaction transactions(TransactionQuery query);
-
- // Query aggregations (leaderboards, stats, rankings)
- [Async, Throws=DojoError]
- PageAggregationEntry aggregations(AggregationQuery query);
-
- // Query activities (user session tracking)
- [Async, Throws=DojoError]
- PageActivity activities(ActivityQuery query);
-
- // Query achievements
- [Async, Throws=DojoError]
- PageAchievement achievements(AchievementQuery query);
-
- // Query player achievements
- [Async, Throws=DojoError]
- PagePlayerAchievement player_achievements(PlayerAchievementQuery query);
-
- // Query entities
- [Async, Throws=DojoError]
- PageEntity entities(Query query);
-
- // Query event messages
- [Async, Throws=DojoError]
- PageEntity event_messages(Query query);
-
- // Query Starknet events
- [Async, Throws=DojoError]
- PageEvent starknet_events(EventQuery query);
-
- // Execute SQL query
- [Async, Throws=DojoError]
- sequence sql(string query);
-
- // Subscription methods
- // Subscribe to entity updates
- [Async, Throws=DojoError]
- u64 subscribe_entity_updates(Clause? clause, sequence world_addresses, EntityUpdateCallback callback);
-
- // Subscribe to token balance updates
- [Async, Throws=DojoError]
- u64 subscribe_token_balance_updates(sequence contract_addresses, sequence account_addresses, sequence token_ids, TokenBalanceUpdateCallback callback);
-
- // Subscribe to token updates
- [Async, Throws=DojoError]
- u64 subscribe_token_updates(sequence contract_addresses, sequence token_ids, TokenUpdateCallback callback);
-
- // Subscribe to transaction updates
- [Async, Throws=DojoError]
- u64 subscribe_transaction_updates(TransactionFilter? filter, TransactionUpdateCallback callback);
-
- // Subscribe to starknet event updates
- [Async, Throws=DojoError]
- u64 subscribe_event_updates(sequence keys, EventUpdateCallback callback);
-
- // Cancel a subscription
- [Throws=DojoError]
- void cancel_subscription(u64 subscription_id);
-};
diff --git a/src/lib.rs b/src/lib.rs
deleted file mode 100644
index c57aca1..0000000
--- a/src/lib.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-// Re-export the appropriate crate based on compilation target
-
-#[cfg(not(target_arch = "wasm32"))]
-pub use c::*;
-
-#[cfg(target_arch = "wasm32")]
-pub use wasm::*;
diff --git a/src/uniffi/README.md b/src/uniffi/README.md
deleted file mode 100644
index 74dfd3d..0000000
--- a/src/uniffi/README.md
+++ /dev/null
@@ -1,93 +0,0 @@
-# UniFFI Bindings
-
-This module provides foreign function interface (FFI) bindings for Dojo using Mozilla's [UniFFI](https://mozilla.github.io/uniffi-rs/) framework.
-
-## Structure
-
-```
-src/
-├── dojo.udl # UniFFI interface definition (required at crate root)
-├── uniffi/ # UniFFI implementation
-│ ├── mod.rs # Main module definition
-│ ├── client.rs # ToriiClient implementation with subscription support
-│ ├── types/ # Type definitions organized by domain
-│ │ ├── mod.rs # Types module definition
-│ │ ├── core.rs # Core types (FieldElement, U256, DojoError, Pagination)
-│ │ ├── achievement.rs # Achievement and player achievement types
-│ │ ├── activity.rs # Activity tracking types
-│ │ ├── aggregation.rs # Aggregation (leaderboards, stats) types
-│ │ ├── contract.rs # Contract query types
-│ │ ├── controller.rs # Controller types
-│ │ ├── entity.rs # Entity, Model, and World types
-│ │ ├── event.rs # Event and event query types
-│ │ ├── query.rs # Query types (Clause, KeysClause, etc.)
-│ │ ├── schema.rs # Schema types (Ty, Struct, Enum, etc.)
-│ │ ├── token.rs # Token and token-related types
-│ │ └── transaction.rs # Transaction types and filters
-│ └── README.md # This file
-└── bin/ # Binding generator binaries
- ├── uniffi-bindgen-swift.rs
- ├── uniffi-bindgen-kotlin.rs
- └── uniffi-bindgen-python.rs
-```
-
-**Note:** The `dojo.udl` file must be in `src/` (not `src/uniffi/`) because UniFFI requires it to be at the crate root level.
-
-## Supported Languages
-
-- **Swift** - iOS/macOS applications
-- **Kotlin** - Android applications
-- **Python** - Python applications and scripts
-
-## Generating Bindings
-
-Use the provided bindgen binaries:
-
-```bash
-# Swift
-cargo run --bin uniffi-bindgen-swift --release -- target/release/libdojo_c.dylib bindings/swift --swift-sources
-
-# Kotlin
-cargo run --bin uniffi-bindgen-kotlin --release -- target/release/libdojo_c.dylib bindings/kotlin
-
-# Python
-cargo run --bin uniffi-bindgen-python --release -- target/release/libdojo_c.dylib bindings/python
-```
-
-## Features
-
-### ToriiClient
-
-The main client interface provides:
-
-- **Queries**: entities, events, tokens, transactions, controllers, contracts, etc.
-- **Subscriptions**: Real-time updates via callbacks
-- **Message Publishing**: Submit offchain messages to the world
-- **SQL Queries**: Direct database queries
-
-### Subscriptions
-
-Subscriptions use callbacks for real-time updates:
-
-- `subscribe_entity_updates` - Entity state changes
-- `subscribe_token_balance_updates` - Token balance changes
-- `subscribe_token_updates` - Token metadata updates
-- `subscribe_transaction_updates` - Transaction updates
-- `subscribe_event_updates` - Starknet event updates
-- `cancel_subscription` - Cancel an active subscription
-
-### Type System
-
-All types are automatically converted between Rust and target languages:
-
-- **FieldElement**: Starknet field element (represented as hex string)
-- **U256**: 256-bit unsigned integer (represented as hex string)
-- **Enums**: Rust enums → Swift enums / Kotlin sealed classes / Python classes
-- **Structs**: Rust structs → Swift structs / Kotlin data classes / Python dataclasses
-- **Options**: `Option` → nullable types in target languages
-- **Errors**: `DojoError` enum for all error cases
-
-## UDL Definition
-
-The interface is defined in `src/dojo.udl` using UniFFI Definition Language.
-
diff --git a/src/uniffi/client.rs b/src/uniffi/client.rs
deleted file mode 100644
index 0f97cad..0000000
--- a/src/uniffi/client.rs
+++ /dev/null
@@ -1,537 +0,0 @@
-// Client wrapper for UniFFI - exposes torii_client functionality
-
-use super::types::*;
-use std::sync::{Arc, Mutex};
-use std::sync::atomic::{AtomicU64, Ordering};
-use std::collections::HashMap;
-use tokio::task::JoinHandle;
-
-// Callback traits for subscriptions
-pub trait EntityUpdateCallback: Send + Sync {
- fn on_update(&self, entity: Entity);
- fn on_error(&self, error: String);
-}
-
-pub trait TokenBalanceUpdateCallback: Send + Sync {
- fn on_update(&self, balance: TokenBalance);
- fn on_error(&self, error: String);
-}
-
-pub trait TokenUpdateCallback: Send + Sync {
- fn on_update(&self, token: Token);
- fn on_error(&self, error: String);
-}
-
-pub trait TransactionUpdateCallback: Send + Sync {
- fn on_update(&self, transaction: Transaction);
- fn on_error(&self, error: String);
-}
-
-pub trait EventUpdateCallback: Send + Sync {
- fn on_update(&self, event: Event);
- fn on_error(&self, error: String);
-}
-
-/// Main Dojo client for interacting with the Torii indexer
-pub struct ToriiClient {
- inner: Arc,
- subscriptions: Arc>>>,
- next_sub_id: Arc,
-}
-
-impl ToriiClient {
- /// Create a new Torii client with default configuration (4MB max message size)
- pub async fn new(torii_url: String) -> Result {
- let client = torii_client::Client::new(torii_url)
- .await
- .map_err(|_e| DojoError::ConnectionError)?;
-
- Ok(Self {
- inner: Arc::new(client),
- subscriptions: Arc::new(Mutex::new(HashMap::new())),
- next_sub_id: Arc::new(AtomicU64::new(0)),
- })
- }
-
- /// Create a new Torii client with custom max message size
- pub async fn new_with_config(
- torii_url: String,
- max_message_size: u64,
- ) -> Result {
- let client = torii_client::Client::new_with_config(torii_url, max_message_size as usize)
- .await
- .map_err(|_e| DojoError::ConnectionError)?;
-
- Ok(Self {
- inner: Arc::new(client),
- subscriptions: Arc::new(Mutex::new(HashMap::new())),
- next_sub_id: Arc::new(AtomicU64::new(0)),
- })
- }
-
- /// Publish an offchain message to the world
- /// Returns the entity ID of the published message
- pub async fn publish_message(&self, message: Message) -> Result {
- let msg: torii_proto::Message = message.into();
- self.inner
- .publish_message(msg)
- .await
- .map_err(|_| DojoError::PublishError)
- }
-
- /// Publish multiple offchain messages to the world
- /// Returns the entity IDs of the published messages
- pub async fn publish_message_batch(&self, messages: Vec) -> Result, DojoError> {
- let msgs: Vec = messages
- .into_iter()
- .map(|m| m.into())
- .collect();
- self.inner
- .publish_message_batch(msgs)
- .await
- .map_err(|_| DojoError::PublishError)
- }
-
- /// Get world metadata for specified world addresses
- pub async fn worlds(&self, world_addresses: Vec) -> Result, DojoError> {
- let addrs: Result, DojoError> = world_addresses
- .iter()
- .map(field_element_to_felt)
- .collect();
- let addrs = addrs?;
-
- let worlds = self.inner
- .worlds(addrs)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(worlds.into_iter().map(|w| w.into()).collect())
- }
-
- /// Retrieve controllers matching the query
- pub async fn controllers(&self, query: ControllerQuery) -> Result {
- let q: torii_proto::ControllerQuery = query.into();
- let page = self.inner
- .controllers(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageController {
- items: page.items.into_iter().map(|c| c.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve contracts matching the query
- pub async fn contracts(&self, query: ContractQuery) -> Result, DojoError> {
- let q: torii_proto::ContractQuery = query.into();
- let contracts = self.inner
- .contracts(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(contracts.into_iter().map(|c| c.into()).collect())
- }
-
- /// Retrieve tokens matching the query
- pub async fn tokens(&self, query: TokenQuery) -> Result {
- let q: torii_proto::TokenQuery = query.into();
- let page = self.inner
- .tokens(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageToken {
- items: page.items.into_iter().map(|t| t.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve token balances
- pub async fn token_balances(&self, query: TokenBalanceQuery) -> Result {
- let q: torii_proto::TokenBalanceQuery = query.into();
- let page = self.inner
- .token_balances(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageTokenBalance {
- items: page.items.into_iter().map(|b| b.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve token contracts
- pub async fn token_contracts(&self, query: TokenContractQuery) -> Result {
- let q: torii_proto::TokenContractQuery = query.into();
- let page = self.inner
- .token_contracts(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageTokenContract {
- items: page.items.into_iter().map(|tc| tc.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve token transfers
- pub async fn token_transfers(&self, query: TokenTransferQuery) -> Result {
- let q: torii_proto::TokenTransferQuery = query.into();
- let page = self.inner
- .token_transfers(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageTokenTransfer {
- items: page.items.into_iter().map(|t| t.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve transactions
- pub async fn transactions(&self, query: TransactionQuery) -> Result {
- let q: torii_proto::TransactionQuery = query.into();
- let page = self.inner
- .transactions(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageTransaction {
- items: page.items.into_iter().map(|t| t.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve aggregations (leaderboards, stats, rankings)
- pub async fn aggregations(&self, query: AggregationQuery) -> Result {
- let q: torii_proto::AggregationQuery = query.into();
- let page = self.inner
- .aggregations(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageAggregationEntry {
- items: page.items.into_iter().map(|a| a.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve activities (user session tracking)
- pub async fn activities(&self, query: ActivityQuery) -> Result {
- let q: torii_proto::ActivityQuery = query.into();
- let page = self.inner
- .activities(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageActivity {
- items: page.items.into_iter().map(|a| a.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve achievements
- pub async fn achievements(&self, query: AchievementQuery) -> Result {
- let q: torii_proto::AchievementQuery = query.into();
- let page = self.inner
- .achievements(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageAchievement {
- items: page.items.into_iter().map(|a| a.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve player achievements
- pub async fn player_achievements(&self, query: PlayerAchievementQuery) -> Result {
- let q: torii_proto::PlayerAchievementQuery = query.into();
- let page = self.inner
- .player_achievements(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PagePlayerAchievement {
- items: page.items.into_iter().map(|p| p.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve entities matching the query
- pub async fn entities(&self, query: Query) -> Result {
- let q: torii_proto::Query = query.into();
- let page = self.inner
- .entities(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageEntity {
- items: page.items.into_iter().map(|e| e.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve event messages matching the query
- pub async fn event_messages(&self, query: Query) -> Result {
- let q: torii_proto::Query = query.into();
- let page = self.inner
- .event_messages(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageEntity {
- items: page.items.into_iter().map(|e| e.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Retrieve raw Starknet events
- pub async fn starknet_events(&self, query: EventQuery) -> Result {
- let q: torii_proto::EventQuery = query.try_into()?;
- let page = self.inner
- .starknet_events(q)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- Ok(PageEvent {
- items: page.items.into_iter().map(|e| e.into()).collect(),
- next_cursor: page.next_cursor,
- })
- }
-
- /// Execute a SQL query against the Torii database
- pub async fn sql(&self, query: String) -> Result, DojoError> {
- let rows = self.inner
- .sql(query)
- .await
- .map_err(|e| DojoError::QueryError(e.to_string()))?;
-
- rows.into_iter().map(|r| r.try_into()).collect()
- }
-
- /// Subscribe to entity updates
- pub async fn subscribe_entity_updates(
- &self,
- clause: Option,
- world_addresses: Vec,
- callback: Box,
- ) -> Result {
- let sub_id = self.next_sub_id.fetch_add(1, Ordering::SeqCst);
-
- let addrs: Result, DojoError> = world_addresses
- .iter()
- .map(field_element_to_felt)
- .collect();
- let addrs = addrs?;
-
- let clause_proto = clause.map(|c| c.into());
-
- let mut stream = self.inner
- .on_entity_updated(clause_proto, addrs)
- .await
- .map_err(|_| DojoError::SubscriptionError)?;
-
- let handle = tokio::spawn(async move {
- use futures_util::StreamExt;
- // Skip the first message which contains the subscription ID
- let _ = stream.next().await;
-
- while let Some(result) = stream.next().await {
- match result {
- Ok((_id, entity)) => {
- callback.on_update(entity.into());
- }
- Err(e) => {
- callback.on_error(e.to_string());
- break;
- }
- }
- }
- });
-
- self.subscriptions.lock().unwrap().insert(sub_id, handle);
- Ok(sub_id)
- }
-
- /// Subscribe to token balance updates
- pub async fn subscribe_token_balance_updates(
- &self,
- contract_addresses: Vec,
- account_addresses: Vec,
- token_ids: Vec,
- callback: Box,
- ) -> Result {
- let sub_id = self.next_sub_id.fetch_add(1, Ordering::SeqCst);
-
- let contracts: Result, DojoError> = contract_addresses
- .iter()
- .map(field_element_to_felt)
- .collect();
- let accounts: Result, DojoError> = account_addresses
- .iter()
- .map(field_element_to_felt)
- .collect();
- let ids: Result, DojoError> = token_ids
- .iter()
- .map(uniffi_to_u256)
- .collect();
-
- let mut stream = self.inner
- .on_token_balance_updated(contracts?, accounts?, ids?)
- .await
- .map_err(|_| DojoError::SubscriptionError)?;
-
- let handle = tokio::spawn(async move {
- use futures_util::StreamExt;
- // Skip the first message which contains the subscription ID
- let _ = stream.next().await;
-
- while let Some(result) = stream.next().await {
- match result {
- Ok((_id, balance)) => {
- callback.on_update(balance.into());
- }
- Err(e) => {
- callback.on_error(e.to_string());
- break;
- }
- }
- }
- });
-
- self.subscriptions.lock().unwrap().insert(sub_id, handle);
- Ok(sub_id)
- }
-
- /// Subscribe to token updates
- pub async fn subscribe_token_updates(
- &self,
- contract_addresses: Vec,
- token_ids: Vec,
- callback: Box,
- ) -> Result {
- let sub_id = self.next_sub_id.fetch_add(1, Ordering::SeqCst);
-
- let contracts: Result, DojoError> = contract_addresses
- .iter()
- .map(field_element_to_felt)
- .collect();
- let ids: Result, DojoError> = token_ids
- .iter()
- .map(uniffi_to_u256)
- .collect();
-
- let mut stream = self.inner
- .on_token_updated(contracts?, ids?)
- .await
- .map_err(|_| DojoError::SubscriptionError)?;
-
- let handle = tokio::spawn(async move {
- use futures_util::StreamExt;
- // Skip the first message which contains the subscription ID
- let _ = stream.next().await;
-
- while let Some(result) = stream.next().await {
- match result {
- Ok((_id, token)) => {
- callback.on_update(token.into());
- }
- Err(e) => {
- callback.on_error(e.to_string());
- break;
- }
- }
- }
- });
-
- self.subscriptions.lock().unwrap().insert(sub_id, handle);
- Ok(sub_id)
- }
-
- /// Subscribe to transaction updates
- pub async fn subscribe_transaction_updates(
- &self,
- filter: Option,
- callback: Box,
- ) -> Result {
- let sub_id = self.next_sub_id.fetch_add(1, Ordering::SeqCst);
-
- let filter_proto = filter.map(|f| f.into());
-
- let mut stream = self.inner
- .on_transaction(filter_proto)
- .await
- .map_err(|_| DojoError::SubscriptionError)?;
-
- let handle = tokio::spawn(async move {
- use futures_util::StreamExt;
- // Skip the first message which contains the subscription ID
- let _ = stream.next().await;
-
- while let Some(result) = stream.next().await {
- match result {
- Ok(transaction) => {
- callback.on_update(transaction.into());
- }
- Err(e) => {
- callback.on_error(e.to_string());
- break;
- }
- }
- }
- });
-
- self.subscriptions.lock().unwrap().insert(sub_id, handle);
- Ok(sub_id)
- }
-
- /// Subscribe to Starknet event updates
- pub async fn subscribe_event_updates(
- &self,
- keys: Vec,
- callback: Box,
- ) -> Result {
- let sub_id = self.next_sub_id.fetch_add(1, Ordering::SeqCst);
-
- let keys_proto: Vec = keys.into_iter().map(|k| k.into()).collect();
-
- let mut stream = self.inner
- .on_starknet_event(keys_proto)
- .await
- .map_err(|_| DojoError::SubscriptionError)?;
-
- let handle = tokio::spawn(async move {
- use futures_util::StreamExt;
- // Skip the first message which contains the subscription ID
- let _ = stream.next().await;
-
- while let Some(result) = stream.next().await {
- match result {
- Ok(event) => {
- callback.on_update(event.into());
- }
- Err(e) => {
- callback.on_error(e.to_string());
- break;
- }
- }
- }
- });
-
- self.subscriptions.lock().unwrap().insert(sub_id, handle);
- Ok(sub_id)
- }
-
- /// Cancel a subscription
- pub fn cancel_subscription(&self, subscription_id: u64) -> Result<(), DojoError> {
- let mut subs = self.subscriptions.lock().unwrap();
- if let Some(handle) = subs.remove(&subscription_id) {
- handle.abort();
- Ok(())
- } else {
- Err(DojoError::SubscriptionError)
- }
- }
-}
diff --git a/src/uniffi/mod.rs b/src/uniffi/mod.rs
deleted file mode 100644
index c422a6d..0000000
--- a/src/uniffi/mod.rs
+++ /dev/null
@@ -1,14 +0,0 @@
-// UniFFI bindings for Dojo
-//
-// This module provides foreign function interface bindings for multiple languages
-// (Swift, Kotlin, Python) using Mozilla's UniFFI framework.
-
-// Type definitions organized by domain
-pub mod types;
-
-// Client implementation
-pub mod client;
-
-// Re-export everything for convenience
-pub use client::*;
-pub use types::*;
diff --git a/src/uniffi/types/achievement.rs b/src/uniffi/types/achievement.rs
deleted file mode 100644
index ae67098..0000000
--- a/src/uniffi/types/achievement.rs
+++ /dev/null
@@ -1,239 +0,0 @@
-// Achievement types
-use super::core::*;
-
-#[derive(Debug, Clone)]
-pub struct AchievementTask {
- pub task_id: String,
- pub description: String,
- pub total: u32,
- pub total_completions: u32,
- pub completion_rate: f64,
- pub created_at: u64,
-}
-
-impl From for AchievementTask {
- fn from(val: torii_proto::AchievementTask) -> Self {
- AchievementTask {
- task_id: val.task_id,
- description: val.description,
- total: val.total,
- total_completions: val.total_completions,
- completion_rate: val.completion_rate,
- created_at: val.created_at.timestamp() as u64,
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct Achievement {
- pub id: String,
- pub world_address: FieldElement,
- pub namespace: String,
- pub entity_id: String,
- pub hidden: bool,
- pub index: u32,
- pub points: u32,
- pub start: String,
- pub end: String,
- pub group: String,
- pub icon: String,
- pub title: String,
- pub description: String,
- pub tasks: Vec,
- pub data: Option,
- pub total_completions: u32,
- pub completion_rate: f64,
- pub created_at: u64,
- pub updated_at: u64,
-}
-
-impl From for Achievement {
- fn from(val: torii_proto::Achievement) -> Self {
- let tasks: Vec = val.tasks.into_iter().map(|t| t.into()).collect();
-
- Achievement {
- id: val.id,
- world_address: felt_to_field_element(val.world_address),
- namespace: val.namespace,
- entity_id: val.entity_id,
- hidden: val.hidden,
- index: val.index,
- points: val.points,
- start: val.start,
- end: val.end,
- group: val.group,
- icon: val.icon,
- title: val.title,
- description: val.description,
- tasks,
- data: val.data,
- total_completions: val.total_completions,
- completion_rate: val.completion_rate,
- created_at: val.created_at.timestamp() as u64,
- updated_at: val.updated_at.timestamp() as u64,
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct AchievementQuery {
- pub world_addresses: Vec,
- pub namespaces: Vec,
- pub hidden: Option,
- pub pagination: Pagination,
-}
-
-impl From for torii_proto::AchievementQuery {
- fn from(val: AchievementQuery) -> Self {
- torii_proto::AchievementQuery {
- world_addresses: val
- .world_addresses
- .into_iter()
- .map(|a| field_element_to_felt(&a).unwrap())
- .collect(),
- namespaces: val.namespaces,
- hidden: val.hidden,
- pagination: val.pagination.into(),
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct TaskProgress {
- pub task_id: String,
- pub count: u32,
- pub completed: bool,
-}
-
-impl From for TaskProgress {
- fn from(val: torii_proto::TaskProgress) -> Self {
- TaskProgress { task_id: val.task_id, count: val.count, completed: val.completed }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct PlayerAchievementProgress {
- pub achievement: Achievement,
- pub task_progress: Vec,
- pub completed: bool,
- pub progress_percentage: f64,
-}
-
-impl From for PlayerAchievementProgress {
- fn from(val: torii_proto::PlayerAchievementProgress) -> Self {
- let task_progress: Vec = val.task_progress.into_iter().map(|t| t.into()).collect();
-
- PlayerAchievementProgress {
- achievement: val.achievement.into(),
- task_progress,
- completed: val.completed,
- progress_percentage: val.progress_percentage,
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct PlayerAchievementStats {
- pub total_points: u32,
- pub completed_achievements: u32,
- pub total_achievements: u32,
- pub completion_percentage: f64,
- pub last_achievement_at: Option,
- pub created_at: u64,
- pub updated_at: u64,
-}
-
-impl From for PlayerAchievementStats {
- fn from(val: torii_proto::PlayerAchievementStats) -> Self {
- PlayerAchievementStats {
- total_points: val.total_points,
- completed_achievements: val.completed_achievements,
- total_achievements: val.total_achievements,
- completion_percentage: val.completion_percentage,
- last_achievement_at: val.last_achievement_at.map(|t| t.timestamp() as u64),
- created_at: val.created_at.timestamp() as u64,
- updated_at: val.updated_at.timestamp() as u64,
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct PlayerAchievementEntry {
- pub player_address: FieldElement,
- pub stats: PlayerAchievementStats,
- pub achievements: Vec,
-}
-
-impl From for PlayerAchievementEntry {
- fn from(val: torii_proto::PlayerAchievementEntry) -> Self {
- let achievements: Vec =
- val.achievements.into_iter().map(|a| a.into()).collect();
-
- PlayerAchievementEntry {
- player_address: felt_to_field_element(val.player_address),
- stats: val.stats.into(),
- achievements,
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct PlayerAchievementQuery {
- pub world_addresses: Vec,
- pub namespaces: Vec,
- pub player_addresses: Vec,
- pub pagination: Pagination,
-}
-
-impl From for torii_proto::PlayerAchievementQuery {
- fn from(val: PlayerAchievementQuery) -> Self {
- torii_proto::PlayerAchievementQuery {
- world_addresses: val
- .world_addresses
- .into_iter()
- .map(|a| field_element_to_felt(&a).unwrap())
- .collect(),
- namespaces: val.namespaces,
- player_addresses: val
- .player_addresses
- .into_iter()
- .map(|a| field_element_to_felt(&a).unwrap())
- .collect(),
- pagination: val.pagination.into(),
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct AchievementProgression {
- pub id: String,
- pub achievement_id: String,
- pub task_id: String,
- pub world_address: FieldElement,
- pub namespace: String,
- pub player_id: FieldElement,
- pub count: u32,
- pub completed: bool,
- pub completed_at: Option,
- pub created_at: u64,
- pub updated_at: u64,
-}
-
-impl From for AchievementProgression {
- fn from(val: torii_proto::AchievementProgression) -> Self {
- AchievementProgression {
- id: val.id,
- achievement_id: val.achievement_id,
- task_id: val.task_id,
- world_address: felt_to_field_element(val.world_address),
- namespace: val.namespace,
- player_id: felt_to_field_element(val.player_id),
- count: val.count,
- completed: val.completed,
- completed_at: val.completed_at.map(|t| t.timestamp() as u64),
- created_at: val.created_at.timestamp() as u64,
- updated_at: val.updated_at.timestamp() as u64,
- }
- }
-}
-
diff --git a/src/uniffi/types/activity.rs b/src/uniffi/types/activity.rs
deleted file mode 100644
index 29c37e3..0000000
--- a/src/uniffi/types/activity.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-// Activity types
-use super::core::*;
-use chrono::DateTime;
-
-#[derive(Debug, Clone)]
-pub struct ActionCount {
- pub action_name: String,
- pub count: u32,
-}
-
-#[derive(Debug, Clone)]
-pub struct Activity {
- pub id: String,
- pub world_address: FieldElement,
- pub namespace: String,
- pub caller_address: FieldElement,
- pub session_start: u64,
- pub session_end: u64,
- pub action_count: u32,
- pub actions: Vec,
- pub updated_at: u64,
-}
-
-impl From