@@ -26,60 +26,64 @@ class ArchiveDownloader:
     ]

     async def find(self, collection, dt):
-        return await self.ia_client.get_snapshot_id_closest_to(collection.url, dt)
-
-    async def parse(self, collection, snapshot):
-        return await collection.MainPageClass.from_snapshot(snapshot)
-
-    async def store(self, page, collection, dt):
-        site_id = await self.storage.add_site(collection.url)
-        snapshot_id = await self.storage.add_snapshot(site_id, page.snapshot.id, dt)
-
-        article_id = await self.storage.add_featured_article(
-            page.main_article.article.original
-        )
-        main_article_snap_id = await self.storage.add_featured_article_snapshot(
-            article_id, page.main_article.article
-        )
-        await self.storage.add_main_article(snapshot_id, main_article_snap_id)
-
-        for t in page.top_articles:
-            article_id = await self.storage.add_featured_article(t.article.original)
-            top_article_snap_id = await self.storage.add_featured_article_snapshot(
-                article_id, t.article
-            )
-            await self.storage.add_top_article(snapshot_id, top_article_snap_id, t)
-
-    async def handle_snap(self, collection, dt):
         try:
-            id_closest = await self.find(collection, dt)
+            return await self.ia_client.get_snapshot_id_closest_to(collection.url, dt)
         except SnapshotNotYetAvailable as e:
             print(f"Snapshot for {collection.url} @ {dt} not yet available")
-            return
+            raise e
         except Exception as e:
             print(f"Error while trying to find snapshot for {collection.url} @ {dt}")
             traceback.print_exception(e)
-            return
+            raise e

+    async def fetch(self, snap_id):
         try:
-            closest = await self.ia_client.fetch(id_closest)
+            return await self.ia_client.fetch(snap_id)
         except Exception as e:
-            print(f"Error while fetching {id_closest} from {collection} @ {dt}")
+            print(f"Error while fetching {snap_id}")
             traceback.print_exception(e)
-            return
+            raise e

+    async def parse(self, collection, snapshot):
         try:
-            main_page = await self.parse(collection, closest)
+            return await collection.MainPageClass.from_snapshot(snapshot)
         except Exception as e:
-            print(f"Error while parsing {closest} from {collection} @ {dt}")
+            print(f"Error while parsing {snapshot}")
             traceback.print_exception(e)
-            return
+            raise e

+    async def store(self, page, collection, dt):
         try:
-            await self.store(main_page, collection, dt)
+            site_id = await self.storage.add_site(collection.url)
+            snapshot_id = await self.storage.add_snapshot(site_id, page.snapshot.id, dt)
+
+            article_id = await self.storage.add_featured_article(
+                page.main_article.article.original
+            )
+            main_article_snap_id = await self.storage.add_featured_article_snapshot(
+                article_id, page.main_article.article
+            )
+            await self.storage.add_main_article(snapshot_id, main_article_snap_id)
+
+            for t in page.top_articles:
+                article_id = await self.storage.add_featured_article(t.article.original)
+                top_article_snap_id = await self.storage.add_featured_article_snapshot(
+                    article_id, t.article
+                )
+                await self.storage.add_top_article(snapshot_id, top_article_snap_id, t)
+
         except Exception as e:
             print(f"Error while attempting to store {main_page} from {collection} @ {dt}")
             traceback.print_exception(e)
+            raise e
+
+    async def handle_snap(self, collection, dt):
+        try:
+            id_closest = await self.find(collection, dt)
+            closest = await self.fetch(id_closest)
+            main_page = await self.parse(collection, closest)
+            await self.store(main_page, collection, dt)
+        except Exception as e:
             return
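With the try/except blocks pushed down into `find`, `fetch`, `parse` and `store`, each step now logs its own failure and re-raises instead of the old `handle_snap` swallowing everything with a bare `return`, so other callers can react to specific errors. As a hedged illustration only (the helper name, the retry policy and the import location of `SnapshotNotYetAvailable` are assumptions, not part of the diff), a caller could now retry `find()` until the Internet Archive has the snapshot:

```python
import asyncio

# Hypothetical helper: relies on find() re-raising SnapshotNotYetAvailable
# instead of swallowing it. The exception class lives in the project's own
# code; adjust the import to wherever it is defined.
async def find_with_retry(downloader, collection, dt, attempts=3, delay=60):
    for attempt in range(attempts):
        try:
            return await downloader.find(collection, dt)
        except SnapshotNotYetAvailable:
            if attempt == attempts - 1:
                raise  # give up after the last attempt
            await asyncio.sleep(delay)  # wait before asking again
```

Nothing changes for the existing flow: `handle_snap` still catches everything at the top and bails out with a plain `return`.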