Skip to content

Commit 89ec124

Browse files
committed
Fix CI errors
1 parent ffdc61a commit 89ec124

File tree

3 files changed

+4
-4
lines changed

3 files changed

+4
-4
lines changed

tests/integration/actor_source_base/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,4 @@ RUN echo "Python version:" \
1212
&& echo "All installed Python packages:" \
1313
&& pip freeze
1414

15-
CMD ["sh", "-c", "python test_server.py & python -m src"]
15+
CMD ["sh", "-c", "python server.py & python -m src"]

tests/integration/test_crawlers_with_storages.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ async def main() -> None:
9494
failed_counter = 0
9595

9696
@crawler.error_handler
97-
async def failed_handler(_: BasicCrawlingContext, __: Exception) -> None:
97+
async def error_handler(_: BasicCrawlingContext, __: Exception) -> None:
9898
nonlocal failed_counter
9999
failed_counter += 1
100100

@@ -103,8 +103,8 @@ async def default_handler(_: ParselCrawlingContext) -> None:
103103
raise RuntimeError('Some error')
104104

105105
await crawler.run(['http://localhost:8080/'])
106-
# https://github.com/apify/crawlee-python/issues/1326 , should be max_retries + 1
107-
assert failed_counter == max_retries, f'{failed_counter=}'
106+
# https://github.com/apify/crawlee-python/issues/1326 , should be max_retries
107+
assert failed_counter == max_retries - 1, f'{failed_counter=}'
108108

109109
actor = await make_actor(label='crawler-max-retries', main_func=main)
110110
run_result = await run_actor(actor)

0 commit comments

Comments (0)