@@ -331,9 +331,9 @@ async def main():
 
     await crawler.run(["https://warehouse-theme-metal.myshopify.com/collections/sales"])
     # highlight-next-line
-    await crawler.export_data_json(path='dataset.json', ensure_ascii=False, indent=2)
+    await crawler.export_data(path='dataset.json', ensure_ascii=False, indent=2)
     # highlight-next-line
-    await crawler.export_data_csv(path='dataset.csv')
+    await crawler.export_data(path='dataset.csv')
 ```
 
 After running the scraper again, there should be two new files in your directory, `dataset.json` and `dataset.csv`, containing all the data. If we peek into the JSON file, it should have indentation.
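
This hunk, like the ones below, swaps the format-specific `export_data_json()` and `export_data_csv()` helpers for the single `export_data()` method. For reference, a minimal end-to-end sketch of the updated tutorial code, assuming the `crawlee.crawlers` import path of recent Crawlee for Python releases and that `export_data()` picks the output format from the file extension; the JSON keyword arguments (`ensure_ascii`, `indent`) are taken verbatim from the diff, everything else is illustrative:

```python
import asyncio

# Import path assumed from recent Crawlee for Python releases; older
# versions expose the crawler under a different module.
from crawlee.crawlers import BeautifulSoupCrawler, BeautifulSoupCrawlingContext


async def main():
    crawler = BeautifulSoupCrawler()

    @crawler.router.default_handler
    async def handle(context: BeautifulSoupCrawlingContext):
        # Stand-in for the tutorial's product-scraping logic.
        await context.push_data({
            'url': context.request.url,
            'title': context.soup.title.string if context.soup.title else None,
        })

    await crawler.run(['https://warehouse-theme-metal.myshopify.com/collections/sales'])

    # One method for both formats: the target format is assumed to be
    # inferred from the file extension, with extra keyword arguments
    # passed through to the JSON serializer.
    await crawler.export_data(path='dataset.json', ensure_ascii=False, indent=2)
    await crawler.export_data(path='dataset.csv')


if __name__ == '__main__':
    asyncio.run(main())
```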
@@ -389,8 +389,8 @@ async def main():
 
     # highlight-next-line
     crawler.log.info("Exporting data")
-    await crawler.export_data_json(path='dataset.json', ensure_ascii=False, indent=2)
-    await crawler.export_data_csv(path='dataset.csv')
+    await crawler.export_data(path='dataset.json', ensure_ascii=False, indent=2)
+    await crawler.export_data(path='dataset.csv')
 
 def parse_variant(variant):
     text = variant.text.strip()
@@ -500,7 +500,7 @@ If you export the dataset as JSON, it should look something like this:
         })
 
     await crawler.run(["https://www.f1academy.com/Racing-Series/Drivers"])
-    await crawler.export_data_json(path='dataset.json', ensure_ascii=False, indent=2)
+    await crawler.export_data(path='dataset.json', ensure_ascii=False, indent=2)
 
 if __name__ == '__main__':
     asyncio.run(main())
@@ -598,7 +598,7 @@ When navigating to the first IMDb search result, you might find it helpful to kn
         })
 
     await crawler.run(["https://www.netflix.com/tudum/top10"])
-    await crawler.export_data_json(path='dataset.json', ensure_ascii=False, indent=2)
+    await crawler.export_data(path='dataset.json', ensure_ascii=False, indent=2)
 
 if __name__ == '__main__':
     asyncio.run(main())
@@ -130,8 +130,8 @@ async def main():
     await crawler.run(["https://warehouse-theme-metal.myshopify.com/collections/sales"])
 
     crawler.log.info("Exporting data")
-    await crawler.export_data_json(path='dataset.json', ensure_ascii=False, indent=2)
-    await crawler.export_data_csv(path='dataset.csv')
+    await crawler.export_data(path='dataset.json', ensure_ascii=False, indent=2)
+    await crawler.export_data(path='dataset.csv')
 
 def parse_variant(variant):
     text = variant.text.strip()
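
Every hunk in this PR makes the same substitution, which only works if one method can route to the right serializer. A hypothetical sketch of the extension-based dispatch that a unified `export_data()` relies on (`infer_export_format` is an invented name for illustration, not Crawlee's internals):

```python
from pathlib import Path


def infer_export_format(path: str) -> str:
    """Map a destination path to an export format by its file extension.

    Hypothetical helper illustrating the dispatch a unified export method
    needs; the library's real implementation may differ.
    """
    suffix = Path(path).suffix.lower()
    formats = {'.json': 'json', '.csv': 'csv'}
    if suffix not in formats:
        raise ValueError(f'Unsupported export extension: {suffix!r}')
    return formats[suffix]


print(infer_export_format('dataset.json'))  # json
print(infer_export_format('dataset.csv'))   # csv
```

Keeping the format decision in one place means the tutorials only ever show one export call, and adding a new format later changes a lookup table rather than the public API.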