Feature: Allow 512 GB persistent volume #629

Open · wants to merge 12 commits into base: main
7 changes: 3 additions & 4 deletions pyproject.toml
@@ -28,7 +28,7 @@ dependencies = [
"aiohttp-jinja2==1.5",
"aioipfs @ git+https://github.com/aleph-im/aioipfs.git@d671c79b2871bb4d6c8877ba1e7f3ffbe7d20b71",
"alembic==1.12.1",
"aleph-message==0.4.9",
"aleph-message @ git+https://github.com/1yam/aleph-message@1yam-512-VM",
"aleph-nuls2==0.1",
"aleph-p2p-client @ git+https://github.com/aleph-im/p2p-service-client-python@2c04af39c566217f629fd89505ffc3270fba8676",
"aleph-pytezos==3.13.4",
@@ -157,9 +157,8 @@ dependencies = [
"isort==5.13.2",
"check-sdist==0.1.3",
"sqlalchemy[mypy]==1.4.41",
"yamlfix==1.16.1",
# because of aleph messages otherwise yamlfix install a too new version
"pydantic>=1.10.5,<2.0.0",
"yamlfix>=1.17",
"pydantic>=2,<3.0.0",
"pyproject-fmt==2.2.1",
"types-aiofiles",
"types-protobuf",
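The dependency changes above move the code base from Pydantic 1.x to Pydantic 2.x, and the remaining files in this diff apply the corresponding API renames. As a rough reference only (the Example model below is hypothetical and not part of the diff), the v1-to-v2 mapping used throughout is:

# Pydantic v1 -> v2 equivalents applied in the files below (Example is illustrative).
from typing import Optional
from pydantic import BaseModel, ConfigDict

class Example(BaseModel):
    # v1: `class Config: orm_mode = True`
    model_config = ConfigDict(from_attributes=True)

    name: str
    size: Optional[int] = None  # v2 no longer treats Optional fields as implicitly defaulted

obj = Example.model_validate({"name": "volume", "size": 512})  # v1: Example.parse_obj(...)
as_dict = obj.model_dump()           # v1: obj.dict()
as_json = obj.model_dump_json()      # v1: obj.json()
field_map = Example.model_fields     # v1: Example.__fields__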
9 changes: 6 additions & 3 deletions src/aleph/chains/chain_data_service.py
@@ -1,4 +1,5 @@
import asyncio
+import json
from typing import Any, Dict, List, Mapping, Optional, Self, Set, Type, Union, cast

import aio_pika.abc
@@ -69,7 +70,7 @@ async def prepare_sync_event_payload(
messages=[OnChainMessage.from_orm(message) for message in messages]
),
)
-archive_content: bytes = archive.json().encode("utf-8")
+archive_content: bytes = archive.model_dump_json().encode("utf-8")

ipfs_cid = await self.storage_service.add_file(
session=session, file_content=archive_content, engine=ItemType.ipfs
@@ -166,7 +167,9 @@ def _get_tx_messages_smart_contract_protocol(tx: ChainTxDb) -> List[Dict[str, An
)

try:
-payload = cast(GenericMessageEvent, payload_model.parse_obj(tx.content))
+payload = cast(
+    GenericMessageEvent, payload_model.model_validate(tx.content)
+)
except ValidationError:
raise InvalidContent(f"Incompatible tx content for {tx.chain}/{tx.hash}")

@@ -189,7 +192,7 @@ def _get_tx_messages_smart_contract_protocol(tx: ChainTxDb) -> List[Dict[str, An
item_hash=ItemHash(payload.content),
metadata=None,
)
-item_content = content.json(exclude_none=True)
+item_content = json.dumps(content.model_dump(exclude_none=True))
else:
item_content = payload.content

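For the exclude_none change above: in Pydantic v2, json.dumps(content.model_dump(exclude_none=True)) and content.model_dump_json(exclude_none=True) carry the same data, so either spelling works. A minimal sketch, with a hypothetical Content model standing in for the real message content:

import json
from typing import Optional
from pydantic import BaseModel

class Content(BaseModel):  # hypothetical stand-in
    address: str
    item_hash: str
    metadata: Optional[dict] = None

content = Content(address="0xabc", item_hash="abc123", metadata=None)
two_step = json.dumps(content.model_dump(exclude_none=True))
one_call = content.model_dump_json(exclude_none=True)
assert json.loads(two_step) == json.loads(one_call)  # same payload, whitespace may differ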
2 changes: 1 addition & 1 deletion src/aleph/chains/ethereum.py
@@ -364,7 +364,7 @@ async def packer(self, config: Config):
account,
int(gas_price * 1.1),
nonce,
-sync_event_payload.json(),
+sync_event_payload.model_dump_json(),
)
LOGGER.info("Broadcast %r on %s" % (response, CHAIN_NAME))

8 changes: 4 additions & 4 deletions src/aleph/chains/indexer_reader.py
@@ -93,7 +93,7 @@ def make_events_query(
model = SyncEvent
event_type_str = "syncEvents"

fields = "\n".join(model.__fields__.keys())
fields = "\n".join(model.model_fields.keys())
params: Dict[str, Any] = {
"blockchain": f'"{blockchain.value}"',
"limit": limit,
@@ -146,8 +146,8 @@ async def _query(self, query: str, model: Type[T]) -> T:

response = await self.http_session.post("/", json={"query": query})
response.raise_for_status()
-response_json = await response.json()
-return model.parse_obj(response_json)
+response_json = await response.model_dump_json()
+return model.model_validate(response_json)

async def fetch_account_state(
self,
@@ -196,7 +196,7 @@ def indexer_event_to_chain_tx(
if isinstance(indexer_event, MessageEvent):
protocol = ChainSyncProtocol.SMART_CONTRACT
protocol_version = 1
-content = indexer_event.dict()
+content = indexer_event.model_dump()
else:
sync_message = aleph_json.loads(indexer_event.message)

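For context on the _query hunk above, the usual pairing is aiohttp's ClientResponse.json() to decode the HTTP body, followed by Pydantic v2's model_validate() on the resulting dict (the v1 name was parse_obj). A minimal sketch of that flow, with an illustrative response model rather than the project's real one:

import aiohttp
from pydantic import BaseModel

class IndexerStatus(BaseModel):  # illustrative only
    status: str

async def query_status(http_session: aiohttp.ClientSession) -> IndexerStatus:
    response = await http_session.post("/", json={"query": "{ indexStatus { status } }"})
    response.raise_for_status()
    response_json = await response.json()  # aiohttp decodes the JSON body
    return IndexerStatus.model_validate(response_json)  # Pydantic v2 validation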
4 changes: 2 additions & 2 deletions src/aleph/chains/nuls2.py
@@ -210,7 +210,7 @@ async def packer(self, config: Config):
# Required to apply update to the files table in get_chaindata
session.commit()

-content = sync_event_payload.json()
+content = sync_event_payload.model_dump_json()
tx = await prepare_transfer_tx(
address,
[(target_addr, CHEAP_UNIT_FEE)],
@@ -248,7 +248,7 @@ async def get_transactions(
"pagination": 500,
},
) as resp:
-jres = await resp.json()
+jres = await resp.model_dump_json()
for tx in sorted(jres["transactions"], key=itemgetter("height")):
if remark is not None and tx["remark"] != remark:
continue
8 changes: 4 additions & 4 deletions src/aleph/chains/tezos.py
@@ -139,7 +139,7 @@
async def get_indexer_status(http_session: aiohttp.ClientSession) -> SyncStatus:
response = await http_session.post("/", json={"query": make_graphql_status_query()})
response.raise_for_status()
-response_json = await response.json()
+response_json = await response.model_dump_json()

return SyncStatus(response_json["data"]["indexStatus"]["status"])

@@ -160,9 +160,9 @@ async def fetch_messages(

response = await http_session.post("/", json={"query": query})
response.raise_for_status()
-response_json = await response.json()
+response_json = await response.model_dump_json()

-return IndexerResponse[IndexerMessageEvent].parse_obj(response_json)
+return IndexerResponse[IndexerMessageEvent].model_validate(response_json)


def indexer_event_to_chain_tx(
@@ -176,7 +176,7 @@ def indexer_event_to_chain_tx(
publisher=indexer_event.source,
protocol=ChainSyncProtocol.SMART_CONTRACT,
protocol_version=1,
-content=indexer_event.payload.dict(),
+content=indexer_event.payload.model_dump(),
)

return chain_tx
5 changes: 2 additions & 3 deletions src/aleph/db/models/messages.py
@@ -14,7 +14,6 @@
StoreContent,
)
from pydantic import ValidationError
-from pydantic.error_wrappers import ErrorWrapper
from sqlalchemy import (
ARRAY,
TIMESTAMP,
@@ -62,14 +61,14 @@ def validate_message_content(
content_dict: Dict[str, Any],
) -> BaseContent:
content_type = CONTENT_TYPE_MAP[message_type]
-content = content_type.parse_obj(content_dict)
+content = content_type.model_validate(content_dict)
# Validate that the content time can be converted to datetime. This will
# raise a ValueError and be caught
# TODO: move this validation in aleph-message
try:
_ = dt.datetime.fromtimestamp(content_dict["time"])
except ValueError as e:
-raise ValidationError([ErrorWrapper(e, loc="time")], model=content_type) from e
+raise ValidationError(str(e)) from e

return content

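Pydantic v2 drops the ErrorWrapper helper removed above, and hand-building a ValidationError is less direct in v2. If the timestamp check were expressed on the model itself, the usual v2 route is a field validator that raises ValueError, which Pydantic wraps into a ValidationError. A sketch under that assumption (TimedContent is hypothetical; the real content models come from aleph-message):

import datetime as dt
from pydantic import BaseModel, field_validator

class TimedContent(BaseModel):  # hypothetical
    time: float

    @field_validator("time")
    @classmethod
    def check_time(cls, value: float) -> float:
        # A ValueError raised here reaches callers as a pydantic.ValidationError.
        dt.datetime.fromtimestamp(value)
        return value

TimedContent.model_validate({"time": 1700000000})  # out-of-range timestamps fail validation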
2 changes: 1 addition & 1 deletion src/aleph/handlers/content/vm.py
@@ -182,7 +182,7 @@ def vm_message_to_db(message: MessageDb) -> VmBaseDb:

if content.on.message:
vm.message_triggers = [
-subscription.dict() for subscription in content.on.message
+subscription.model_dump() for subscription in content.on.message
]

vm.code_volume = CodeVolumeDb(
5 changes: 2 additions & 3 deletions src/aleph/schemas/api/accounts.py
@@ -2,7 +2,7 @@
from decimal import Decimal
from typing import List

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field

from aleph.types.files import FileType
from aleph.types.sort_order import SortOrder
@@ -40,8 +40,7 @@ class GetAccountFilesResponseItem(BaseModel):


class GetAccountFilesResponse(BaseModel):
-class Config:
-    orm_mode = True
+model_config = ConfigDict(from_attributes=True)

address: str
total_size: int
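The from_attributes=True setting above is the v2 spelling of v1's orm_mode: it lets model_validate() read objects that expose plain attributes, such as ORM rows (v1 used from_orm for this). A small sketch with a stand-in object; FileRecord is illustrative, not the project's model:

from types import SimpleNamespace
from pydantic import BaseModel, ConfigDict

class FileRecord(BaseModel):  # illustrative only
    model_config = ConfigDict(from_attributes=True)

    file_hash: str
    size: int

row = SimpleNamespace(file_hash="abc123", size=512)
item = FileRecord.model_validate(row)  # v1 equivalent: FileRecord.from_orm(row)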
54 changes: 24 additions & 30 deletions src/aleph/schemas/api/messages.py
@@ -25,10 +25,8 @@
ProgramContent,
StoreContent,
)
-from pydantic import BaseModel, Field
-from pydantic.generics import GenericModel
+from pydantic import BaseModel, ConfigDict, Field

-import aleph.toolkit.json as aleph_json
from aleph.db.models import MessageDb
from aleph.types.message_status import ErrorCode, MessageStatus

@@ -39,26 +37,26 @@
class MessageConfirmation(BaseModel):
"""Format of the result when a message has been confirmed on a blockchain"""

-class Config:
-    orm_mode = True
-    json_encoders = {dt.datetime: lambda d: d.timestamp()}
+model_config = ConfigDict(
+    from_attributes=True, json_encoders={dt.datetime: lambda d: d.timestamp()}
+)

chain: Chain
height: int
hash: str


-class BaseMessage(GenericModel, Generic[MType, ContentType]):
-    class Config:
-        orm_mode = True
-        json_loads = aleph_json.loads
-        json_encoders = {dt.datetime: lambda d: d.timestamp()}
+class BaseMessage(BaseModel, Generic[MType, ContentType]):
+    model_config = ConfigDict(
+        from_attributes=True,
+        json_encoders={dt.datetime: lambda d: d.timestamp()},
+    )

sender: str
chain: Chain
-signature: Optional[str]
+signature: Optional[str] = None
type: MType
-item_content: Optional[str]
+item_content: Optional[str] = None
item_type: ItemType
item_hash: str
time: dt.datetime
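The BaseMessage change above reflects that Pydantic v2 drops pydantic.generics.GenericModel: generic models now subclass BaseModel directly alongside typing.Generic. A minimal sketch of the pattern (Envelope is a hypothetical stand-in, not BaseMessage itself):

from typing import Generic, TypeVar
from pydantic import BaseModel

ContentT = TypeVar("ContentT")

class Envelope(BaseModel, Generic[ContentT]):  # hypothetical
    item_hash: str
    content: ContentT

msg = Envelope[dict].model_validate({"item_hash": "abc123", "content": {"size": 512}})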
@@ -133,7 +131,7 @@ def format_message(message: MessageDb) -> AlephMessage:
def format_message_dict(message: Dict[str, Any]) -> AlephMessage:
message_type = message.get("type")
message_cls = MESSAGE_CLS_DICT[message_type]
-return message_cls.parse_obj(message)
+return message_cls.model_validate(message)


class BaseMessageStatus(BaseModel):
@@ -145,45 +143,41 @@ class BaseMessageStatus(BaseModel):
# We already have a model for the validation of pending messages, but this one
# is only used for formatting and does not try to be smart.
class PendingMessage(BaseModel):
-class Config:
-    orm_mode = True
+model_config = ConfigDict(from_attributes=True)

sender: str
chain: Chain
-signature: Optional[str]
+signature: Optional[str] = None
type: MessageType
-item_content: Optional[str]
+item_content: Optional[str] = None
item_type: ItemType
item_hash: str
time: dt.datetime
channel: Optional[str] = None
-content: Optional[Dict[str, Any]]
+content: Optional[Dict[str, Any]] = None
reception_time: dt.datetime


class PendingMessageStatus(BaseMessageStatus):
-class Config:
-    orm_mode = True
+model_config = ConfigDict(from_attributes=True)

status: MessageStatus = MessageStatus.PENDING
messages: List[PendingMessage]


class ProcessedMessageStatus(BaseMessageStatus):
-class Config:
-    orm_mode = True
+model_config = ConfigDict(from_attributes=True)

status: MessageStatus = MessageStatus.PROCESSED
message: AlephMessage


class ForgottenMessage(BaseModel):
-class Config:
-    orm_mode = True
+model_config = ConfigDict(from_attributes=True)

sender: str
chain: Chain
-signature: Optional[str]
+signature: Optional[str] = None
type: MessageType
item_type: ItemType
item_hash: str
@@ -201,7 +195,7 @@ class RejectedMessageStatus(BaseMessageStatus):
status: MessageStatus = MessageStatus.REJECTED
message: Mapping[str, Any]
error_code: ErrorCode
-details: Any
+details: Any = None


MessageWithStatus = Union[


class MessageListResponse(BaseModel):
-class Config:
-    json_encoders = {dt.datetime: lambda d: d.timestamp()}
-    json_loads = aleph_json.loads
+model_config = ConfigDict(
+    json_encoders={dt.datetime: lambda d: d.timestamp()},
+)

messages: List[AlephMessage]
pagination_page: int
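A caveat on the config above: json_encoders is still honored by Pydantic v2 but is marked deprecated there; the non-deprecated way to express the same datetime-to-timestamp encoding is a field serializer. A sketch of that alternative (EpochModel is hypothetical):

import datetime as dt
from pydantic import BaseModel, field_serializer

class EpochModel(BaseModel):  # hypothetical
    time: dt.datetime

    @field_serializer("time")
    def serialize_time(self, value: dt.datetime) -> float:
        return value.timestamp()

EpochModel(time=dt.datetime(2024, 1, 1)).model_dump_json()  # time is emitted as epoch seconds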