Merge pull request #2719 from data-for-change/dev
merge dev into master
atalyaalon authored Oct 20, 2024
2 parents 0e4f713 + 34b2d8e commit 90b5617
Showing 4 changed files with 68 additions and 30 deletions.
@@ -0,0 +1,28 @@
"""add street numeric indexes to involved heberw
Revision ID: e962054e4422
Revises: mn9px8cacn24
Create Date: 2024-10-20 08:24:08.746964
"""

# revision identifiers, used by Alembic.
revision = 'e962054e4422'
down_revision = 'mn9px8cacn24'
branch_labels = None
depends_on = None

from alembic import op
# import sqlalchemy as sa


def upgrade():
op.create_index('ix_involved_markers_hebrew_street1',
'involved_markers_hebrew', ['street1'], unique=False)
op.create_index('ix_involved_markers_hebrew_street2',
'involved_markers_hebrew', ['street2'], unique=False)


def downgrade():
op.drop_index('ix_involved_markers_hebrew_street1', 'involved_markers_hebrew')
op.drop_index('ix_involved_markers_hebrew_street2', 'involved_markers_hebrew')
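
For context, not part of this commit: once the migration has been applied (e.g. with `alembic upgrade head`), the two new indexes can be sanity-checked with SQLAlchemy's inspector. This is a hedged sketch; the connection URL is a placeholder and it assumes `involved_markers_hebrew` is reflectable in your SQLAlchemy version.

```
# Editor's illustration, not part of the migration. Placeholder connection URL;
# assumes involved_markers_hebrew is reflectable by the inspector.
from sqlalchemy import create_engine, inspect

engine = create_engine("postgresql://user:password@localhost/anyway")  # placeholder
inspector = inspect(engine)

expected = {"ix_involved_markers_hebrew_street1", "ix_involved_markers_hebrew_street2"}
found = {idx["name"] for idx in inspector.get_indexes("involved_markers_hebrew")}
print("missing indexes:", expected - found)  # an empty set means both indexes exist
```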
51 changes: 25 additions & 26 deletions anyway/parsers/infographics_data_cache_updater.py
@@ -179,46 +179,45 @@ def get_streets() -> Iterable[Streets]:
 
 def get_street_infographic_keys() -> Iterable[Dict[str, int]]:
     for street in get_streets():
-        for y in CONST.INFOGRAPHICS_CACHE_YEARS_AGO:
-            yield {
-                "yishuv_symbol": street.yishuv_symbol,
-                "street1": street.street,
-                "years_ago": y,
-                "lang": "en",
-            }
+        if street_has_accidents(street.yishuv_symbol, street.street):
+            for y in CONST.INFOGRAPHICS_CACHE_YEARS_AGO:
+                yield {
+                    "yishuv_symbol": street.yishuv_symbol,
+                    "street1": street.street,
+                    "years_ago": y,
+                    "lang": "en",
+                }
 
 
 def build_street_cache_into_temp():
     start = datetime.now()
     db.session.query(InfographicsStreetDataCacheTemp).delete()
     db.session.commit()
-    for n, chunk in enumerate(chunked_generator(get_street_infographic_keys(), 4960)):
-        cache_chunk = [
-            {
-                "yishuv_symbol": d["yishuv_symbol"],
-                "street": d["street1"],
-                "years_ago": d["years_ago"],
-                "data": anyway.infographics_utils.create_infographics_data_for_location(d),
-            }
-            for d in chunk
-            if street_has_accidents(d["yishuv_symbol"], d["street1"])
-        ]
-        if cache_chunk:
-            logging.debug(f"Adding chunk num {n}, {len(chunk)} entries.")
-            # pylint: disable=no-member
-            db.get_engine().execute(InfographicsStreetDataCacheTemp.__table__.insert(), cache_chunk)
+    for chunk in chunked_generator(get_street_infographic_keys(), 4960):
+        db.get_engine().execute(
+            InfographicsStreetDataCacheTemp.__table__.insert(),
+            [
+                {
+                    "yishuv_symbol": d["yishuv_symbol"],
+                    "street": d["street1"],
+                    "years_ago": d["years_ago"],
+                    "data": anyway.infographics_utils.create_infographics_data_for_location(d),
+                }
+                for d in chunk
+            ],
+        )
     db.session.commit()
     logging.info(f"cache rebuild took:{str(datetime.now() - start)}")
 
 
 def street_has_accidents(yishuv_symbol: int, street: int) -> bool:
-    return (
+    res = db.session.query(
         db.session.query(AccidentMarker)
         .filter(AccidentMarker.yishuv_symbol == yishuv_symbol)
         .filter(or_(AccidentMarker.street1 == street, AccidentMarker.street2 == street))
-        .count()
-        > 0
-    )
+        .exists()
+    ).scalar()
+    return res
 
 
 def get_road_segments() -> Iterable[RoadSegments]:
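
For context, not part of this commit: the new `street_has_accidents` wraps the filtered query in `EXISTS` and fetches a single boolean instead of counting every matching row, and the accident filter moves from `build_street_cache_into_temp` into `get_street_infographic_keys`, so keys for streets without accidents are never yielded. Below is a minimal, self-contained sketch of the `exists()` pattern; `Marker` and the in-memory SQLite engine are hypothetical stand-ins, not the project's models.

```
# Editor's illustration of the COUNT-vs-EXISTS pattern; Marker is a hypothetical
# stand-in for AccidentMarker and the in-memory SQLite engine is just for the sketch.
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Marker(Base):
    __tablename__ = "markers"
    id = Column(Integer, primary_key=True)
    street1 = Column(Integer)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Marker(street1=7))
    session.commit()

    # Old approach: COUNT(*) over all matching rows, then compare to zero.
    has_count = session.query(Marker).filter(Marker.street1 == 7).count() > 0

    # New approach: SELECT EXISTS(...) can return as soon as one row matches.
    inner = session.query(Marker).filter(Marker.street1 == 7)
    has_exists = session.query(inner.exists()).scalar()

    print(has_count, has_exists)  # True True
```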
6 changes: 3 additions & 3 deletions anyway/widgets/widget.py
@@ -30,7 +30,7 @@ class Widget:
     def __init__(self, request_params: RequestParams):
         self.request_params = copy.deepcopy(request_params)
         self.rank = -1
-        self.items = {}
+        self.items = None
         self.text = {}
         self.meta = {"widget_digest": self.widget_digest}
         self.information = ""
@@ -53,7 +53,7 @@ def is_included(self) -> bool:
 
     def generate_items(self) -> None:
         """Generates the data of the widget and set it to self.items"""
-        pass
+        self.items = {}
 
     @staticmethod
     def is_relevant(request_params: RequestParams) -> bool:
@@ -104,7 +104,7 @@ def generate_widget_data(cls, request_params: RequestParams):
         return {}
 
     def serialize(self):
-        if not self.items:
+        if self.items is None:
             self.generate_items()
         output = {"name": self.name, "data": {}}
         output["data"]["items"] = self.items if self.is_included() else {}
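
For context, not part of this commit: initializing `items` to `None` and checking `is None` in `serialize` distinguishes "not generated yet" from "generated but legitimately empty", since an empty dict is falsy and `if not self.items` would regenerate it on every call. A minimal sketch with a hypothetical `StubWidget` (not the project's class):

```
# Editor's illustration; StubWidget is hypothetical and not the project's Widget class.
class StubWidget:
    def __init__(self):
        self.items = None  # None means "not generated yet"

    def generate_items(self):
        self.items = {}  # a legitimately empty result

    def serialize(self):
        if self.items is None:  # generate at most once
            self.generate_items()
        return {"data": {"items": self.items}}


w = StubWidget()
print(w.serialize())  # triggers generate_items()
print(w.serialize())  # does not regenerate: items is {}, which is not None
```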
13 changes: 12 additions & 1 deletion docs/DOCKER.md
@@ -25,7 +25,7 @@ Instructions
 
 **1.** [Get the code](https://github.com/data-for-change/anyway#getting-the-code)
 
-**2.** [Install Docker](https://docs.docker.com/install/) and [Install Docker Compose](https://docs.docker.com/compose/install/)
+**2.** [Install Docker](https://docs.docker.com/install/) and [Install Docker Compose](https://docs.docker.com/compose/install/). (On Windows, install Docker for Windows even when using WSL2.)
 
 **3.** Get the `.env` file with the required secret values and place in the project **root directory** - can be downloaded [from here](https://drive.google.com/file/d/1bgMyKlHoAAIixlk8qqmZaXPdmqCxldLu/view?usp=sharing). Note that this file **needs to be saved as `.env`** - with the `.` at the beginning of the name of the file.
 **Continue with your OS, See below**
@@ -251,6 +251,17 @@ services:
 
 This loads the ./anyway dir (relative to the docker-compose file) as /anyway/anyway in the docker overriding the inner volume and allowing you to run your own code inside the docker.
 
+
+Common Errors
+-----------------------
+Q: When working in a Windows environment, you might encounter this error during the build process:
+```
+anyway_1 | standard_init_linux.go:211: exec user process caused "no such file or directory"
+```
+A: The fix is simple: open the docker-entrypoint.sh file and save it with LF line endings (the Linux newline convention) instead of CRLF (the Windows newline convention).
+In VS Code, open the file and select LF in the status bar at the bottom right of the screen; for other editors, check online for instructions.
+
+
 Questions and ideas
 -----------------
 Talk to Atalya on ANYWAY's Slack (atalya) or email us [[email protected]](mailto:[email protected]).
