Merge pull request 'Fix bare excepts' (#14) from fix-bare-excepts into main
All checks were successful
ci/woodpecker/push/lint Pipeline was successful
Reviewed-on: #14
Commit ad81825bfe
1 changed file with 6 additions and 5 deletions
@@ -1,5 +1,6 @@
 import json
 from datetime import datetime
+from json.decoder import JSONDecodeError
 from typing import Optional
 
 import requests
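For reference, json.loads raises json.decoder.JSONDecodeError (a subclass of ValueError) when it is handed text that is not valid JSON, which is what the new import makes available to the handlers below. A minimal standalone sketch, not part of this diff:

    import json
    from json.decoder import JSONDecodeError

    try:
        json.loads("<html>not json</html>")
    except JSONDecodeError as exc:
        # Malformed input raises JSONDecodeError rather than returning None
        print(f"Could not parse JSON: {exc}")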
@@ -70,7 +71,7 @@ with Session(engine) as session:
         schema_org = json.loads(schema_org_scriptblock.text)
         assert schema_org["@context"] == "http://schema.org"
         last_updated_time = datetime.fromisoformat(schema_org["dateModified"])
-    except:
+    except (AssertionError, JSONDecodeError):
         print(
             "Could not find or load schema.org data for this post, looking up the meta published time"
         )
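This hunk narrows the bare except to the two failures the block is expected to produce: the assert raising AssertionError and json.loads raising JSONDecodeError. Anything else now propagates instead of being silenced. A minimal sketch of the same pattern, with a hypothetical helper name and input:

    import json
    from json.decoder import JSONDecodeError

    def load_schema_org(raw: str) -> dict:
        try:
            data = json.loads(raw)
            assert data.get("@context") == "http://schema.org"
            return data
        except (AssertionError, JSONDecodeError):
            # Only the anticipated failure modes are handled here
            print("Could not find or load schema.org data")
            return {}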
@@ -80,7 +81,7 @@ with Session(engine) as session:
                 "meta", attrs={"property": "article:published_time"}
             )["content"]
         )
-    except:
+    except NameError:
         print("Could not find or load the meta published time for this post")
         last_updated_time = datetime.now()
 
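Catching NameError in this hunk (and the following one) presumably covers the case where a name assigned inside an earlier try block, such as article_soup, was never bound because that block failed. A minimal sketch of that failure mode, using hypothetical names:

    def fetch_article():
        raise RuntimeError("network error")   # simulate the earlier block failing

    try:
        soup = fetch_article()
    except RuntimeError:
        print("Could not fetch the article")

    try:
        title = soup.title                    # `soup` was never bound above
    except NameError:
        # Referencing an unassigned name raises NameError, which is what
        # these handlers now catch instead of using a bare except
        title = "unknown"

    print(title)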
@@ -106,14 +107,14 @@ with Session(engine) as session:
         article_record.dfp_targeting_id = article_soup.find(
             "script", {"class": "dfp_targeting", "data-key": "id"}
         )["data-value"]
-    except:
+    except NameError:
         print("Could not find or load IDs for this post")
 
     try:
         tags = article_soup.find(
             "script", {"class": "dfp_targeting", "data-key": "tags"}
         )["data-value"].split("|")
-    except:
+    except NameError:
         print("Could not find or load any tags for this article")
         tags = []
 
@@ -141,7 +142,7 @@ with Session(engine) as session:
             article_record.post_id = mastodon_post_result["id"]
         else:
             print("Article has already been posted")
-    except:
+    except Exception:
         print("Could not load a description/post this article")
 
     article_record.article_link = article_link
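The final hunk keeps a broad handler but switches from a bare except to except Exception. The practical difference: SystemExit and KeyboardInterrupt derive from BaseException rather than Exception, so Ctrl-C and sys.exit() still stop the script instead of being swallowed and logged as a failed post. A quick check:

    print(issubclass(KeyboardInterrupt, Exception))      # False
    print(issubclass(SystemExit, Exception))             # False
    print(issubclass(KeyboardInterrupt, BaseException))  # True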