Initial Commit

This commit is contained in:
Sarah Jamie Lewis 2019-06-25 15:26:21 -07:00
parent 7261b6eed5
commit 03d784d7c4
24 changed files with 332 additions and 0 deletions

2
.gitignore vendored
View File

@ -24,3 +24,5 @@ _testmain.go
*.test
*.prof
latest*
report.html

1
IACR-eprint/feedinfo Normal file
View File

@ -0,0 +1 @@
https://eprint.iacr.org/rss/rss.xml 1440

1
amnesty/feedinfo Normal file
View File

@ -0,0 +1 @@
https://www.amnesty.ca/rss.xml 1440

1
arxiv-cs.CR/feedinfo Normal file
View File

@ -0,0 +1 @@
http://export.arxiv.org/rss/cs.CR 1440

View File

@ -0,0 +1 @@
https://bitcoin.org/en/rss/releases.rss 1440

View File

@ -0,0 +1 @@
https://www.grin-forum.org/c/announce.rss 1440

1
i2pblog/feedinfo Normal file
View File

@ -0,0 +1 @@
https://geti2p.net/el/feed/blog/atom 1440

View File

@ -0,0 +1 @@
https://coindesk.com/author/lcuen/feed 60

View File

@ -0,0 +1 @@
https://techcrunch.com/author/zack-whittaker/feed/ 60

1
katzenpost-blog/feedinfo Normal file
View File

@ -0,0 +1 @@
https://katzenpost.mixnetworks.org/blog/atom.xml 1440

233
main.go Normal file
View File

@ -0,0 +1,233 @@
package main
import (
"bufio"
"encoding/json"
"fmt"
"github.com/grokify/html-strip-tags-go"
"github.com/mmcdole/gofeed"
"github.com/writeas/go-strip-markdown"
"golang.org/x/net/proxy"
"io/ioutil"
"log"
"net/http"
"net/url"
"os"
"path"
"strconv"
"strings"
"time"
)
// fetch downloads and parses the RSS/Atom feed at url and caches the
// parsed result as JSON at cachepath.
//
// Bug fix: the original ignored every error, so a failed fetch would
// overwrite a previously good cache with a marshaled nil feed.
func fetch(url string, cachepath string) {
	fmt.Printf("Fetching [%v]\n", url)
	fp := gofeed.NewParser()
	feed, err := fp.ParseURL(url)
	if err != nil {
		// Keep any previously cached copy rather than clobbering it.
		log.Printf("unable to fetch %v: %v", url, err)
		return
	}
	fmt.Printf("Feed %v %v\n", feed, err)
	cache, err := json.Marshal(feed)
	if err != nil {
		log.Printf("unable to marshal feed %v: %v", url, err)
		return
	}
	if err := ioutil.WriteFile(cachepath, cache, 0644); err != nil {
		log.Printf("unable to write cache %v: %v", cachepath, err)
	}
}
// report loads every cached feed (the "latest" JSON file) from each
// subdirectory of the current directory into a map keyed by directory
// name. Directories without a readable cache map to a zero-valued feed,
// so the template renderer can still detect that the feed exists.
func report() map[string]gofeed.Feed {
	feedMap := make(map[string]gofeed.Feed)
	items, err := ioutil.ReadDir(".")
	if err != nil {
		// Bug fix: the original silently ignored this error.
		log.Printf("unable to read current directory: %v", err)
		return feedMap
	}
	for _, item := range items {
		if !item.IsDir() {
			continue
		}
		cachepath := path.Join(".", item.Name(), "latest")
		var feed gofeed.Feed
		if data, err := ioutil.ReadFile(cachepath); err == nil {
			if err := json.Unmarshal(data, &feed); err != nil {
				// A corrupt cache still yields an (empty) entry,
				// matching the original behavior.
				log.Printf("corrupt cache %v: %v", cachepath, err)
			}
		}
		feedMap[item.Name()] = feed
	}
	return feedMap
}
// download fetches url through the local Tor SOCKS proxy
// (127.0.0.1:9050) and writes the response body to cachepath.
func download(url string, cachepath string) {
	fmt.Printf("Fetching [%v]\n", url)
	torDialer, err := proxy.SOCKS5("tcp", "127.0.0.1:9050", nil, proxy.Direct)
	if err != nil {
		// Bug fix: this error was previously ignored; a nil dialer
		// would panic inside the transport on the first request.
		log.Printf("unable to create tor proxy dialer: %v", err)
		return
	}
	transportConfig := &http.Transport{
		Dial: torDialer.Dial,
	}
	client := http.Client{
		Transport: transportConfig,
		// Bound a hung proxy/server; the original could block forever.
		Timeout: time.Minute,
		CheckRedirect: func(r *http.Request, via []*http.Request) error {
			// Preserve the literal request path across redirects.
			r.URL.Opaque = r.URL.Path
			return nil
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		log.Printf("unable to fetch %v: %v", url, err)
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Bug fix: the original cached error pages as if they were
		// the requested content.
		log.Printf("unexpected status fetching %v: %v", url, resp.Status)
		return
	}
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Printf("unable to read body of %v: %v", url, err)
		return
	}
	if err := ioutil.WriteFile(cachepath, body, 0644); err != nil {
		log.Printf("unable to write %v: %v", cachepath, err)
	}
}
// update walks every subdirectory of the current working directory and
// refreshes its cached content: a "feedinfo" file describes an RSS/Atom
// feed to fetch, an "images" file lists image URLs to download. Both use
// "<url> <interval-minutes>" lines.
func update() {
	items, err := ioutil.ReadDir(".")
	if err != nil {
		log.Printf("unable to read current directory: %v", err)
		return
	}
	for _, item := range items {
		if !item.IsDir() {
			continue
		}
		subitems, err := ioutil.ReadDir(item.Name())
		if err != nil {
			log.Printf("unable to read %v: %v", item.Name(), err)
			continue
		}
		for _, subitem := range subitems {
			switch subitem.Name() {
			case "feedinfo":
				updateFeed(item.Name(), path.Join(".", item.Name(), subitem.Name()))
			case "images":
				// Bug fix: the images file is now opened/closed inside a
				// helper, so descriptors no longer accumulate via defer
				// in a loop, and one bad file no longer log.Fatals the
				// whole update run.
				updateImages(item.Name(), path.Join(".", item.Name(), subitem.Name()))
			}
		}
	}
}

// updateFeed re-fetches the feed described by the feedinfo file at
// filepath ("<url> <interval-minutes>") when the cached copy in dir is
// missing or older than the configured interval.
func updateFeed(dir string, filepath string) {
	fmt.Println("Processing: " + filepath)
	rawfeedinfo, err := ioutil.ReadFile(filepath)
	if err != nil {
		log.Printf("unable to read %v: %v", filepath, err)
		return
	}
	feedinfo := strings.Split(strings.TrimSpace(string(rawfeedinfo)), " ")
	if len(feedinfo) < 2 {
		// Bug fix: the original indexed feedinfo[1] unconditionally.
		log.Printf("malformed feedinfo in %v", filepath)
		return
	}
	cachepath := path.Join(".", dir, "latest")
	if stale(cachepath, feedinfo[1]) {
		fetch(feedinfo[0], cachepath)
	}
}

// updateImages reads the images file at filepath, one
// "<url> <interval-minutes>" entry per line, and re-downloads any image
// whose cached copy ("latest-<basename>" in dir) is missing or stale.
func updateImages(dir string, filepath string) {
	file, err := os.Open(filepath)
	if err != nil {
		log.Printf("unable to open %v: %v", filepath, err)
		return
	}
	defer file.Close()
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		imageinfo := strings.Split(scanner.Text(), " ")
		if len(imageinfo) < 2 {
			log.Printf("malformed image entry in %v", filepath)
			continue
		}
		uri, err := url.Parse(imageinfo[0])
		if err != nil {
			log.Printf("invalid image url %q in %v: %v", imageinfo[0], filepath, err)
			continue
		}
		cachepath := path.Join(".", dir, "latest-"+path.Base(uri.Path))
		if stale(cachepath, imageinfo[1]) {
			download(imageinfo[0], cachepath)
		}
	}
	if err := scanner.Err(); err != nil {
		log.Printf("error scanning %v: %v", filepath, err)
	}
}

// stale reports whether the cache at cachepath is missing or older than
// the interval given in minutes (as a decimal string from a config line).
// An unparsable interval is treated as "always stale", matching the
// original behavior where a failed Atoi yielded 0.
func stale(cachepath string, minutes string) bool {
	info, err := os.Stat(cachepath)
	if err != nil {
		// No cache yet: first fetch.
		return true
	}
	cron, err := strconv.Atoi(minutes)
	if err != nil {
		log.Printf("invalid interval %q for %v: %v", minutes, cachepath, err)
		return true
	}
	return time.Since(info.ModTime()) > time.Duration(cron)*time.Minute
}
// processItems renders every feed item that passes the freshness filter.
// A dateCheck of 0 disables filtering and prints all items; otherwise an
// item is printed only when its published (or, failing that, updated)
// timestamp is within dateCheck of now. When nothing matches, a single
// placeholder line is emitted instead.
func processItems(format []string, items []*gofeed.Item, dateCheck time.Duration) {
	matched := 0
	for _, entry := range items {
		switch {
		case dateCheck == 0:
			processItem(format, *entry)
			matched = 100
		case entry.PublishedParsed != nil:
			if time.Since(*entry.PublishedParsed) < dateCheck {
				processItem(format, *entry)
				matched++
			}
		case entry.UpdatedParsed != nil:
			if time.Since(*entry.UpdatedParsed) < dateCheck {
				processItem(format, *entry)
				matched++
			}
		}
	}
	if matched == 0 {
		fmt.Printf("* Nothing new this %v From %v\n", format[1], format[0])
	}
}
// stripString removes html, then potential markdown characters, and then some additional potential markdown
func stripString(input string) string {
return strings.Replace(stripmd.Strip(strip.StripTags(input)), "`", "", -1)
}
// processItem prints one feed item as a markdown bullet. The fields to
// emit are given by format[2:], in order: "Title", "Link" and/or
// "Description", each sanitized through stripString.
func processItem(format []string, item gofeed.Item) {
	fmt.Printf("* ")
	for _, field := range format[2:] {
		switch field {
		case "Title":
			fmt.Printf("%v ", stripString(item.Title))
		case "Link":
			fmt.Printf("[%v](%v)", stripString(item.Link), stripString(item.Link))
		case "Description":
			fmt.Printf("\n * %v <hr/>", strings.Replace(stripString(item.Description), "\n", "", -1))
		}
	}
	fmt.Printf("\n")
}
// main dispatches the two subcommands:
//
//	update            - refresh all cached feeds and images
//	report <template> - render a report template to stdout
func main() {
	if len(os.Args) >= 2 {
		switch os.Args[1] {
		case "update":
			update()
		case "report":
			if len(os.Args) == 3 {
				runReport(os.Args[2])
			}
		}
	}
	os.Exit(0)
}

// runReport renders the report template at templatePath against the
// cached feeds. Template lines are: "%"-prefixed comments (skipped),
// "#"/"<"-prefixed markdown/HTML (echoed verbatim), or
// "<feed> <ALL|DAY|WEEK|index> <fields...>" feed directives; anything
// else produces a blank line.
func runReport(templatePath string) {
	feeds := report()
	file, err := os.Open(templatePath)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		line := scanner.Text()
		reportLine := strings.Split(line, " ")
		switch {
		case strings.HasPrefix(reportLine[0], "%"):
			// Template comment; ignore.
		case strings.HasPrefix(reportLine[0], "#"), strings.HasPrefix(reportLine[0], "<"):
			// Markdown headings and raw HTML pass through verbatim.
			fmt.Printf("%v\n", strings.TrimSpace(line))
		case len(reportLine) > 2:
			feed, exists := feeds[reportLine[0]]
			if !exists {
				log.Fatalf("Report Template Contains Non-Existent Feed %v\n", reportLine[0])
			}
			switch reportLine[1] {
			case "ALL":
				processItems(reportLine, feed.Items, 0)
			case "DAY": // Only output entries from the last Day
				processItems(reportLine, feed.Items, time.Hour*24)
			case "WEEK": // Only output entries from the last Week
				processItems(reportLine, feed.Items, time.Hour*24*7)
			default:
				index, err := strconv.Atoi(reportLine[1])
				// Bug fix: the original ignored the Atoi error and
				// never bounds-checked, so a bad template line could
				// panic with an index-out-of-range.
				if err != nil || index < 0 || index >= len(feed.Items) {
					log.Fatalf("Invalid item index %q for feed %v\n", reportLine[1], reportLine[0])
				}
				processItem(reportLine, *feed.Items[index])
			}
		default:
			fmt.Printf("\n")
		}
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}

1
monero/feedinfo Normal file
View File

@ -0,0 +1 @@
https://web.getmonero.org/feed.xml 1440

View File

@ -0,0 +1 @@
https://privacyinternational.org/rss.xml 1440

1
proof-of-work/feedinfo Normal file
View File

@ -0,0 +1 @@
https://proofofwork.news/feed/ 1440

2
pt-reyes/images Normal file
View File

@ -0,0 +1,2 @@
https://ocean.weather.gov/shtml/pyba01bw.gif 120
https://ocean.weather.gov/shtml/EVPN13.jpg 120

1
reddit-rtlsdr/feedinfo Normal file
View File

@ -0,0 +1 @@
https://www.reddit.com/r/RTLSDR.xml 1440

75
report.template Normal file
View File

@ -0,0 +1,75 @@
<style> body{background:#1A111A;color:#fff;font-family:Sans;padding:0 25% 0 25%;} h2{font-size:14px;} h3{font-size:12px;} a{color:#fff;}</style>
# Daily Report
## Weather
weather 1 Title
weather 2 Title
## Satellite
<img src="./pt-reyes/latest-pyba01bw.gif" width="300" height="300"/>
<img src="./pt-reyes/latest-EVPN13.jpg" width="300" height="300"/>
<hr/>
## In The News
% Not all the journalists I follow have dedicated rss feeds available for their publication (and some don't have a publication)
journalist-zackwhittaker DAY Title Link
journalist-leighcuen DAY Title Link
reveal DAY Title Link
## Activism
seashepherd WEEK Title Link
amnesty WEEK Title Link
privacy-international WEEK Title Link
## Anonymity Blogs
tor-blog DAY Title Link
i2pblog DAY Title Link
katzenpost-blog DAY Title Link
securedrop WEEK Title Link
<hr/>
## Cryptocurrency
### General
proof-of-work WEEK Title Link
### Bitcoin
bitcoin-releases 0 Title Link
### Monero
monero WEEK Title Link
### Zcash
zcash-blog WEEK Title Link
zcash-fnd-blog WEEK Title Link
### Grin
grin-annoucements WEEK Title Link
<hr/>
## New Crypto/Security Papers
% Neither of these offers proper published dates in their feeds
arxiv-cs.CR ALL Title Link Description
IACR-eprint ALL Title Link Description
<hr/>
## Hobbies
### Radio
reddit-rtlsdr DAY Title Link

1
reveal/feedinfo Normal file
View File

@ -0,0 +1 @@
https://www.revealnews.org/feed/ 60

1
seashepherd/feedinfo Normal file
View File

@ -0,0 +1 @@
https://seashepherd.org/feed/ 1440

1
securedrop/feedinfo Normal file
View File

@ -0,0 +1 @@
https://securedrop.org/news/feed/ 1440

1
tor-blog/feedinfo Normal file
View File

@ -0,0 +1 @@
https://blog.torproject.org/rss.xml 1440

1
weather/feedinfo Normal file
View File

@ -0,0 +1 @@
https://weather.gc.ca/rss/city/bc-74_e.xml 60

1
zcash-blog/feedinfo Normal file
View File

@ -0,0 +1 @@
https://electriccoin.co/blog/feed 1440

1
zcash-fnd-blog/feedinfo Normal file
View File

@ -0,0 +1 @@
https://www.zfnd.org/feed.xml 1440