Crawlee & Apify•2y ago
ambitious-aqua

Function not executing for some reason.

import asyncio

import aiohttp
from apify import Actor

# fetch_emails, apify_client, website_list and invalid_url are defined elsewhere in the script

async def main():
    async with Actor:
        input_data = await Actor.get_input()
        urls = input_data['websites']
        print(urls)

    # this block is at the same indentation level as the `async with Actor:` block above
    async with aiohttp.ClientSession() as session:
        tasks = [fetch_emails(session, website) for website in urls]
        results = await asyncio.gather(*tasks)
        print('here')
        dataset = apify_client.dataset()
        for email in website_list:
            print(email)
            dataset.push_data({"email": email})

        print(website_list)
        print(f"tried to scrape {len(urls)}")
        print(f"there is {invalid_url}")
        print(f'successfully scraped {len(website_list)} emails ! ')

asyncio.run(main())
The

async with Actor:
    input_data = await Actor.get_input()
    urls = input_data['websites']
    print(urls)

block does print the URLs, but after that the code stops. The script works fine in VS Code; I'm trying to integrate it with Apify, but I'm running into issues.
3 Replies
like-gold•2y ago
Hey, try to put the whole code inside the Actor context manager (under async with Actor:). You can read more about Actor lifecycles and alternatives here: https://docs.apify.com/sdk/python/docs/concepts/actor-lifecycle
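A minimal sketch of that restructuring (assuming fetch_emails is the asker's own coroutine that takes a session and a URL and returns the e-mail addresses found on that site): with the aiohttp block indented under async with Actor:, the Actor only exits after the scraping finishes, and results can be stored in the default dataset with Actor.push_data.

import asyncio

import aiohttp
from apify import Actor

# fetch_emails(session, url) is assumed to be defined elsewhere in the script
# and to return a list of e-mail addresses for one website.

async def main():
    async with Actor:  # everything stays inside the Actor context
        input_data = await Actor.get_input()
        urls = input_data['websites']

        async with aiohttp.ClientSession() as session:
            tasks = [fetch_emails(session, website) for website in urls]
            results = await asyncio.gather(*tasks)

        # flatten the per-site lists and push each e-mail to the default dataset
        emails = [email for site_emails in results for email in site_emails]
        for email in emails:
            await Actor.push_data({"email": email})

        print(f"tried to scrape {len(urls)} sites, found {len(emails)} emails")

asyncio.run(main())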
ambitious-aquaOP•2y ago
Already figured that out but ty anyways
MEE6•2y ago
@Kirada just advanced to level 2! Thanks for your contributions! 🎉
