Compare commits


3 Commits

Author SHA1 Message Date
Ryan Rix 61d4fe3dfe add a shell.nix 2021-10-23 16:21:32 -07:00
Ryan Rix 0d5a440b56 trying to get HTML previews links in element 2021-10-23 16:20:50 -07:00
Ryan Rix 7b84dd567b py3 2021-10-23 16:20:23 -07:00
3 changed files with 33 additions and 15 deletions

.envrc Normal file (+1)

@@ -0,0 +1 @@
+use nix

View File

@@ -11,7 +11,7 @@ from bisect import bisect_right
 import re
 import string
 import datetime
-from cStringIO import StringIO
+from io import StringIO
 import arrow
 import subprocess
@@ -19,7 +19,6 @@ import sys
 import os
 from os import path
 import shutil
-import urllib2
 import hashlib
 import markdown, feedgen.feed as feed
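
Note added for clarity (not part of the diff): these import changes are the standard Python 3 migration; cStringIO is gone and StringIO now lives in io, while urllib2 was split into urllib.request and urllib.error. The script fetches via curl in a subprocess, so no urllib replacement is needed. A minimal sketch of the Python 3 equivalents:

    from io import StringIO             # replaces cStringIO.StringIO
    from urllib.request import urlopen  # Python 3 counterpart of urllib2.urlopen, unused here since curl does the fetching

    buf = StringIO()
    buf.write("in-memory text buffer, same API as before")
    print(buf.getvalue())
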
@@ -102,9 +101,9 @@ def setup_afd_feed(result_dir, afd_entries):
     afd_feed.description("NWS Seattle Area Forecast Discussion")
     current = None
-    current_md = None
+    current_html = ""
-    for afd_entry in sorted(afd_entries, reverse = False, key=lambda e: e["timestamp"] ):
+    for afd_entry in sorted(afd_entries, reverse = True, key=lambda e: e["timestamp"] ):
         eid = afd_entry["timestamp"].strftime("%y-%m-%d-%H%m")
         if not current:
             afd_feed.updated(afd_entry["timestamp"])
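
Note added for clarity (not part of the diff): flipping reverse to True makes the loop walk entries newest-first, so the first iteration, the one that sets the feed's updated timestamp and captures current_html, sees the most recent discussion. A toy illustration with placeholder timestamps:

    entries = [{"timestamp": 1}, {"timestamp": 3}, {"timestamp": 2}]          # stand-ins for parsed AFD entries
    newest_first = sorted(entries, reverse=True, key=lambda e: e["timestamp"])
    print(newest_first[0])                                                    # {'timestamp': 3}, the latest entry
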
@@ -117,23 +116,30 @@ def setup_afd_feed(result_dir, afd_entries):
         with open(entry_md_file, "w") as md_out:
             md_out.write(entry_md)
         item = afd_feed.add_entry()
-        md = markdown.markdown( entry_md )
+        entry_html = markdown.markdown( entry_md )
+        entry_html_file = path.join(result_dir, eid + ".html")
+        logging.info("Writing entry html file: %s", entry_html_file)
+        with open(entry_html_file, "w") as html_out:
+            html_out.write(entry_html)
-        if not current_md:
-            current_md = md
+        item = afd_feed.add_entry()
+        if not current_html:
+            current_html = entry_html
         item.title(pformat_time(afd_entry["timestamp"]))
-        item.link(href=("https://afd.fontkeming.fail/SEW/" + eid + ".md"))
-        item.description(md)
+        item.link(href=("https://afd.fontkeming.fail/SEW/" + eid + ".html"))
+        item.description(entry_html)
+        item.summary(entry_html) # XXX: would be nice to only have the summary here...
         item.pubDate(afd_entry["timestamp"])
         item.updated(afd_entry["timestamp"])
-        item.guid(eid)
+        item.id(eid)
     logging.info("Writing current: %s", current)
-    with open( path.join(result_dir, "current.md"), "w") as md_out:
-        md_out.write(current_md)
+    with open( path.join(result_dir, "index.html"), "w") as latest_out:
+        latest_out.write(current_html)
     logging.info("Rendering feed file: %s", path.join(result_dir, "AFDSEW.xml"))
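
Note added for clarity (not part of the diff): the reworked loop renders each entry's Markdown to HTML, writes it to <eid>.html, and feeds the HTML (rather than raw Markdown) into the entry's link, description, and summary, which is what the HTML-preview commit is after. A minimal, self-contained sketch of that pattern with placeholder titles and text, reusing the markdown and feedgen calls that appear in the diff:

    import markdown
    import feedgen.feed as feed
    from datetime import datetime, timezone

    afd_feed = feed.FeedGenerator()
    afd_feed.title("SEW Area Forecast Discussion")                 # placeholder feed metadata
    afd_feed.link(href="https://afd.fontkeming.fail/SEW/")
    afd_feed.description("NWS Seattle Area Forecast Discussion")

    entry_html = markdown.markdown("## Synopsis\n\nPlaceholder discussion text.")
    item = afd_feed.add_entry()
    item.title("placeholder entry")
    item.link(href="https://afd.fontkeming.fail/SEW/example.html")
    item.description(entry_html)                                   # HTML body, as in the change above
    item.summary(entry_html)
    item.pubDate(datetime.now(timezone.utc))                       # feedgen expects a timezone-aware datetime
    item.id("example")
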
@@ -143,17 +149,17 @@ def setup_afd_feed(result_dir, afd_entries):
 def fetch_feed_files(search_prefix):
-    feed_files = glob.glob(path.join(search_prefix+"/raw_SEW/AFDSEW.*"))
+    feed_files = glob.glob(search_prefix+"/raw_SEW/AFDSEW.*")
     return feed_files

 def is_new_afd(text, search_prefix):
     feed_files = fetch_feed_files(search_prefix)
-    new_hash = hashlib.sha224(text).hexdigest()
+    new_hash = hashlib.sha224(text.encode('utf8')).hexdigest()
     for fname in feed_files:
         with open(fname, "r") as f:
-            hash = hashlib.sha224(f.read()).hexdigest()
+            hash = hashlib.sha224(f.read().encode('utf8')).hexdigest()
             if hash == new_hash:
                 return False
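
Note added for clarity (not part of the diff): Python 3's hashlib only hashes bytes, so both the freshly fetched text and the stored feed files are encoded before comparing digests. A minimal illustration with a placeholder string:

    import hashlib

    text = "placeholder forecast text"
    digest = hashlib.sha224(text.encode("utf8")).hexdigest()    # hash the encoded bytes
    # hashlib.sha224(text) would raise TypeError on Python 3, since str is not bytes
    print(digest)
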
@@ -182,6 +188,7 @@ def fetch(prefix):
     url = "https://www.wrh.noaa.gov/total_forecast/getprod.php?" + \
         "new&wfo=sew&sid=SEW&pil=AFD&toggle=textonly"
     rsp = subprocess.check_output("curl '{url}'".format(url=url), shell=True)
+    rsp = rsp.decode()
     afd = parse_afd(rsp)
     ts = afd.get("timestamp")
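
Note added for clarity (not part of the diff): subprocess.check_output returns bytes on Python 3, so the curl response is decoded to str before parse_afd sees it. A minimal sketch with a placeholder command standing in for the real curl invocation:

    import subprocess

    raw = subprocess.check_output("echo 'Area Forecast Discussion'", shell=True)   # bytes, e.g. b'Area Forecast Discussion\n'
    text = raw.decode()                                                            # now a str, ready for text parsing
    print(text.strip())
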

shell.nix Normal file (+10)

@@ -0,0 +1,10 @@
+let
+  pkgs = import <nixpkgs> {};
+in pkgs.mkShell {
+  packages = [
+    pkgs.python3
+  ];
+  shellHook = ''
+    source env/bin/activate
+  '';
+}