diff --git a/cmd/github-dashboard/main.go b/cmd/github-dashboard/main.go
new file mode 100644
index 0000000..e4250ba
--- /dev/null
+++ b/cmd/github-dashboard/main.go
@@ -0,0 +1,7 @@
+package main
+
+import "github.com/go-i2p/go-github-dashboard/pkg/cmd"
+
+func main() {
+ cmd.Execute()
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..8840427
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,40 @@
+module github.com/go-i2p/go-github-dashboard
+
+go 1.24.2
+
+require (
+ github.com/google/go-github/v58 v58.0.0
+ github.com/hashicorp/go-retryablehttp v0.7.7
+ github.com/mmcdole/gofeed v1.3.0
+ github.com/russross/blackfriday/v2 v2.1.0
+ github.com/spf13/cobra v1.9.1
+ github.com/spf13/viper v1.20.1
+ golang.org/x/oauth2 v0.30.0
+)
+
+require (
+ github.com/PuerkitoBio/goquery v1.8.0 // indirect
+ github.com/andybalholm/cascadia v1.3.1 // indirect
+ github.com/fsnotify/fsnotify v1.8.0 // indirect
+ github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
+ github.com/google/go-querystring v1.1.0 // indirect
+ github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
+ github.com/inconshreveable/mousetrap v1.1.0 // indirect
+ github.com/json-iterator/go v1.1.12 // indirect
+ github.com/mmcdole/goxpp v1.1.1-0.20240225020742-a0c311522b23 // indirect
+ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+ github.com/modern-go/reflect2 v1.0.2 // indirect
+ github.com/pelletier/go-toml/v2 v2.2.3 // indirect
+ github.com/sagikazarmark/locafero v0.7.0 // indirect
+ github.com/sourcegraph/conc v0.3.0 // indirect
+ github.com/spf13/afero v1.12.0 // indirect
+ github.com/spf13/cast v1.7.1 // indirect
+ github.com/spf13/pflag v1.0.6 // indirect
+ github.com/subosito/gotenv v1.6.0 // indirect
+ go.uber.org/atomic v1.9.0 // indirect
+ go.uber.org/multierr v1.9.0 // indirect
+ golang.org/x/net v0.33.0 // indirect
+ golang.org/x/sys v0.29.0 // indirect
+ golang.org/x/text v0.21.0 // indirect
+ gopkg.in/yaml.v3 v3.0.1 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..fedfec7
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,103 @@
+github.com/PuerkitoBio/goquery v1.8.0 h1:PJTF7AmFCFKk1N6V6jmKfrNH9tV5pNE6lZMkG0gta/U=
+github.com/PuerkitoBio/goquery v1.8.0/go.mod h1:ypIiRMtY7COPGk+I/YbZLbxsxn9g5ejnI2HSMtkjZvI=
+github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c=
+github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
+github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
+github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
+github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
+github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
+github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
+github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
+github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
+github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-github/v58 v58.0.0 h1:Una7GGERlF/37XfkPwpzYJe0Vp4dt2k1kCjlxwjIvzw=
+github.com/google/go-github/v58 v58.0.0/go.mod h1:k4hxDKEfoWpSqFlc8LTpGd9fu2KrV1YAa6Hi6FmDNY4=
+github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
+github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
+github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
+github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
+github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
+github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
+github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mmcdole/gofeed v1.3.0 h1:5yn+HeqlcvjMeAI4gu6T+crm7d0anY85+M+v6fIFNG4=
+github.com/mmcdole/gofeed v1.3.0/go.mod h1:9TGv2LcJhdXePDzxiuMnukhV2/zb6VtnZt1mS+SjkLE=
+github.com/mmcdole/goxpp v1.1.1-0.20240225020742-a0c311522b23 h1:Zr92CAlFhy2gL+V1F+EyIuzbQNbSgP4xhTODZtrXUtk=
+github.com/mmcdole/goxpp v1.1.1-0.20240225020742-a0c311522b23/go.mod h1:v+25+lT2ViuQ7mVxcncQ8ch1URund48oH+jhjiwEgS8=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
+github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
+github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
+github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
+github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
+github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
+github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
+github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
+github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
+github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
+github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
+github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
+github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
+github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
+github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
+go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
+go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
+go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
+go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
+golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
+golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
+golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
+golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
+golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/pkg/api/cache.go b/pkg/api/cache.go
new file mode 100644
index 0000000..fedd73d
--- /dev/null
+++ b/pkg/api/cache.go
@@ -0,0 +1,137 @@
+package api
+
+import (
+ "encoding/gob"
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync"
+ "time"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/types"
+)
+
+// CacheItem represents a cached item with its expiration time
+type CacheItem struct {
+ Value interface{}
+ Expiration time.Time
+}
+
+// Cache provides a simple caching mechanism for API responses
+type Cache struct {
+ items map[string]CacheItem
+ mutex sync.RWMutex
+ dir string
+ ttl time.Duration
+}
+
+// NewCache creates a new cache with the given directory and TTL
+func NewCache(config *types.Config) *Cache {
+ // Register types for gob encoding
+ gob.Register([]types.Repository{})
+ gob.Register([]types.PullRequest{})
+ gob.Register([]types.Issue{})
+ gob.Register([]types.Discussion{})
+
+ cache := &Cache{
+ items: make(map[string]CacheItem),
+ dir: config.CacheDir,
+ ttl: config.CacheTTL,
+ }
+
+ // Load cache from disk
+ cache.loadCache()
+
+ return cache
+}
+
+// Get retrieves a value from the cache
+func (c *Cache) Get(key string) (interface{}, bool) {
+ c.mutex.RLock()
+ defer c.mutex.RUnlock()
+
+ item, found := c.items[key]
+ if !found {
+ return nil, false
+ }
+
+ // Check if the item has expired
+ if time.Now().After(item.Expiration) {
+ return nil, false
+ }
+
+ return item.Value, true
+}
+
+// Set stores a value in the cache
+func (c *Cache) Set(key string, value interface{}) {
+ c.mutex.Lock()
+ defer c.mutex.Unlock()
+
+ c.items[key] = CacheItem{
+ Value: value,
+ Expiration: time.Now().Add(c.ttl),
+ }
+
+ // Save the cache to disk (in a separate goroutine to avoid blocking)
+ go c.saveCache()
+}
+
+// Clear clears all items from the cache
+func (c *Cache) Clear() {
+ c.mutex.Lock()
+ defer c.mutex.Unlock()
+
+ c.items = make(map[string]CacheItem)
+ go c.saveCache()
+}
+
+// saveCache saves the cache to disk
+func (c *Cache) saveCache() {
+ c.mutex.RLock()
+ defer c.mutex.RUnlock()
+
+ cacheFile := filepath.Join(c.dir, "cache.gob")
+ file, err := os.Create(cacheFile)
+ if err != nil {
+ fmt.Printf("Error creating cache file: %v\n", err)
+ return
+ }
+ defer file.Close()
+
+ encoder := gob.NewEncoder(file)
+ err = encoder.Encode(c.items)
+ if err != nil {
+ fmt.Printf("Error encoding cache: %v\n", err)
+ }
+}
+
+// loadCache loads the cache from disk
+func (c *Cache) loadCache() {
+ cacheFile := filepath.Join(c.dir, "cache.gob")
+ file, err := os.Open(cacheFile)
+ if err != nil {
+ // If the file doesn't exist, that's not an error
+ if !os.IsNotExist(err) {
+ fmt.Printf("Error opening cache file: %v\n", err)
+ }
+ return
+ }
+ defer file.Close()
+
+ decoder := gob.NewDecoder(file)
+ err = decoder.Decode(&c.items)
+ if err != nil {
+ fmt.Printf("Error decoding cache: %v\n", err)
+ // If there's an error decoding, start with a fresh cache
+ c.items = make(map[string]CacheItem)
+ }
+
+ // Remove expired items
+ now := time.Now()
+ for key, item := range c.items {
+ if now.After(item.Expiration) {
+ delete(c.items, key)
+ }
+ }
+}
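
A minimal usage sketch of the cache above (not part of the patch). The `types.Config` fields shown (`CacheDir`, `CacheTTL`) are the ones `NewCache` reads, and `Repository.Name` comes from how `convertRepository` populates it; the cache key and demo value are made up for illustration.

```go
package main

import (
	"fmt"
	"time"

	"github.com/go-i2p/go-github-dashboard/pkg/api"
	"github.com/go-i2p/go-github-dashboard/pkg/types"
)

func main() {
	// Only CacheDir and CacheTTL are read by NewCache.
	cfg := &types.Config{CacheDir: "./.cache", CacheTTL: time.Hour}
	cache := api.NewCache(cfg) // loads .cache/cache.gob if it exists

	// Set stores the value in memory and persists cache.gob asynchronously
	// (the write may not finish before a short program exits).
	cache.Set("repos_example", []types.Repository{{Name: "demo"}})

	// Get returns (value, true) until the TTL elapses.
	if v, ok := cache.Get("repos_example"); ok {
		fmt.Println("cached repositories:", len(v.([]types.Repository)))
	}
}
```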
diff --git a/pkg/api/github.go b/pkg/api/github.go
new file mode 100644
index 0000000..6fd542e
--- /dev/null
+++ b/pkg/api/github.go
@@ -0,0 +1,296 @@
+package api
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "net/http"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/types"
+ "github.com/google/go-github/v58/github"
+ "github.com/hashicorp/go-retryablehttp"
+ "golang.org/x/oauth2"
+)
+
+// GitHubClient wraps the GitHub API client with additional functionality
+type GitHubClient struct {
+ client *github.Client
+ cache *Cache
+ rateLimited bool
+ config *types.Config
+}
+
+// NewGitHubClient creates a new GitHub API client
+func NewGitHubClient(config *types.Config, cache *Cache) *GitHubClient {
+ var httpClient *http.Client
+
+ // Create a retry client
+ retryClient := retryablehttp.NewClient()
+ retryClient.RetryMax = 3
+ retryClient.Logger = nil // Disable logging from the retry client
+
+ if config.GithubToken != "" {
+ // If token is provided, use it for authentication
+ ts := oauth2.StaticTokenSource(
+ &oauth2.Token{AccessToken: config.GithubToken},
+ )
+ httpClient = oauth2.NewClient(context.Background(), ts)
+ retryClient.HTTPClient = httpClient
+ }
+
+ client := github.NewClient(retryClient.StandardClient())
+
+ return &GitHubClient{
+ client: client,
+ cache: cache,
+ rateLimited: false,
+ config: config,
+ }
+}
+
+// GetRepositories fetches repositories for a user or organization
+func (g *GitHubClient) GetRepositories(ctx context.Context) ([]types.Repository, error) {
+ var allRepos []types.Repository
+
+ cacheKey := "repos_"
+ if g.config.User != "" {
+ cacheKey += g.config.User
+ } else {
+ cacheKey += g.config.Organization
+ }
+
+ // Try to get from cache first
+ if cachedRepos, found := g.cache.Get(cacheKey); found {
+ if g.config.Verbose {
+ log.Println("Using cached repositories")
+ }
+ return cachedRepos.([]types.Repository), nil
+ }
+
+ if g.config.Verbose {
+ log.Println("Fetching repositories from GitHub API")
+ }
+
+ for {
+ if g.config.User != "" {
+ opts := &github.RepositoryListOptions{
+ ListOptions: github.ListOptions{PerPage: 100},
+ Sort: "updated",
+ }
+ repos, resp, err := g.client.Repositories.List(ctx, g.config.User, opts)
+ if err != nil {
+ return nil, fmt.Errorf("error fetching repositories: %w", err)
+ }
+
+ for _, repo := range repos {
+ allRepos = append(allRepos, convertRepository(repo))
+ }
+
+ if resp.NextPage == 0 {
+ break
+ }
+ opts.Page = resp.NextPage
+ } else {
+ opts := &github.RepositoryListByOrgOptions{
+ ListOptions: github.ListOptions{PerPage: 100},
+ Sort: "updated",
+ }
+ repos, resp, err := g.client.Repositories.ListByOrg(ctx, g.config.Organization, opts)
+ if err != nil {
+ return nil, fmt.Errorf("error fetching repositories: %w", err)
+ }
+
+ for _, repo := range repos {
+ allRepos = append(allRepos, convertRepository(repo))
+ }
+
+ if resp.NextPage == 0 {
+ break
+ }
+ opts.Page = resp.NextPage
+ }
+ }
+
+ // Cache the results
+ g.cache.Set(cacheKey, allRepos)
+
+ return allRepos, nil
+}
+
+// GetPullRequests fetches open pull requests for a repository
+func (g *GitHubClient) GetPullRequests(ctx context.Context, owner, repo string) ([]types.PullRequest, error) {
+ var allPRs []types.PullRequest
+ cacheKey := fmt.Sprintf("prs_%s_%s", owner, repo)
+
+ // Try to get from cache first
+ if cachedPRs, found := g.cache.Get(cacheKey); found {
+ if g.config.Verbose {
+ log.Printf("Using cached pull requests for %s/%s", owner, repo)
+ }
+ return cachedPRs.([]types.PullRequest), nil
+ }
+
+ if g.config.Verbose {
+ log.Printf("Fetching pull requests for %s/%s", owner, repo)
+ }
+
+ opts := &github.PullRequestListOptions{
+ State: "open",
+ Sort: "updated",
+ Direction: "desc",
+ ListOptions: github.ListOptions{PerPage: 100},
+ }
+
+ for {
+ prs, resp, err := g.client.PullRequests.List(ctx, owner, repo, opts)
+ if err != nil {
+ return nil, fmt.Errorf("error fetching pull requests: %w", err)
+ }
+
+ for _, pr := range prs {
+ allPRs = append(allPRs, convertPullRequest(pr))
+ }
+
+ if resp.NextPage == 0 {
+ break
+ }
+ opts.Page = resp.NextPage
+ }
+
+ // Cache the results
+ g.cache.Set(cacheKey, allPRs)
+
+ return allPRs, nil
+}
+
+// GetIssues fetches open issues for a repository
+func (g *GitHubClient) GetIssues(ctx context.Context, owner, repo string) ([]types.Issue, error) {
+ var allIssues []types.Issue
+ cacheKey := fmt.Sprintf("issues_%s_%s", owner, repo)
+
+ // Try to get from cache first
+ if cachedIssues, found := g.cache.Get(cacheKey); found {
+ if g.config.Verbose {
+ log.Printf("Using cached issues for %s/%s", owner, repo)
+ }
+ return cachedIssues.([]types.Issue), nil
+ }
+
+ if g.config.Verbose {
+ log.Printf("Fetching issues for %s/%s", owner, repo)
+ }
+
+ opts := &github.IssueListByRepoOptions{
+ State: "open",
+ Sort: "updated",
+ Direction: "desc",
+ ListOptions: github.ListOptions{PerPage: 100},
+ }
+
+ for {
+ issues, resp, err := g.client.Issues.ListByRepo(ctx, owner, repo, opts)
+ if err != nil {
+ return nil, fmt.Errorf("error fetching issues: %w", err)
+ }
+
+ for _, issue := range issues {
+ // Skip pull requests (they appear in the issues API)
+ if issue.PullRequestLinks != nil {
+ continue
+ }
+ allIssues = append(allIssues, convertIssue(issue))
+ }
+
+ if resp.NextPage == 0 {
+ break
+ }
+ opts.Page = resp.NextPage
+ }
+
+ // Cache the results
+ g.cache.Set(cacheKey, allIssues)
+
+ return allIssues, nil
+}
+
+// GetDiscussions fetches recent discussions for a repository
+func (g *GitHubClient) GetDiscussions(ctx context.Context, owner, repo string) ([]types.Discussion, error) {
+ // Note: the GitHub REST API (v3) has no endpoint for discussions, so they
+ // are fetched from the repository's Atom feed instead; see
+ // RSSClient.GetDiscussionsFromRSS in pkg/api/rss.go.
+ return []types.Discussion{}, nil
+}
+
+// Helper functions to convert GitHub API types to our domain types
+func convertRepository(repo *github.Repository) types.Repository {
+ r := types.Repository{
+ Name: repo.GetName(),
+ FullName: repo.GetFullName(),
+ Description: repo.GetDescription(),
+ URL: repo.GetHTMLURL(),
+ Owner: repo.GetOwner().GetLogin(),
+ Stars: repo.GetStargazersCount(),
+ Forks: repo.GetForksCount(),
+ }
+
+ if repo.UpdatedAt != nil {
+ r.LastUpdated = repo.UpdatedAt.Time
+ }
+
+ return r
+}
+
+func convertPullRequest(pr *github.PullRequest) types.PullRequest {
+ pullRequest := types.PullRequest{
+ Number: pr.GetNumber(),
+ Title: pr.GetTitle(),
+ URL: pr.GetHTMLURL(),
+ Author: pr.GetUser().GetLogin(),
+ AuthorURL: pr.GetUser().GetHTMLURL(),
+ Status: pr.GetState(),
+ }
+
+ if pr.CreatedAt != nil {
+ pullRequest.CreatedAt = pr.CreatedAt.Time
+ }
+
+ if pr.UpdatedAt != nil {
+ pullRequest.UpdatedAt = pr.UpdatedAt.Time
+ }
+
+ for _, label := range pr.Labels {
+ pullRequest.Labels = append(pullRequest.Labels, types.Label{
+ Name: label.GetName(),
+ Color: label.GetColor(),
+ })
+ }
+
+ return pullRequest
+}
+
+func convertIssue(issue *github.Issue) types.Issue {
+ i := types.Issue{
+ Number: issue.GetNumber(),
+ Title: issue.GetTitle(),
+ URL: issue.GetHTMLURL(),
+ Author: issue.GetUser().GetLogin(),
+ AuthorURL: issue.GetUser().GetHTMLURL(),
+ }
+
+ if issue.CreatedAt != nil {
+ i.CreatedAt = issue.CreatedAt.Time
+ }
+
+ if issue.UpdatedAt != nil {
+ i.UpdatedAt = issue.UpdatedAt.Time
+ }
+
+ for _, label := range issue.Labels {
+ i.Labels = append(i.Labels, types.Label{
+ Name: label.GetName(),
+ Color: label.GetColor(),
+ })
+ }
+
+ return i
+}
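
A sketch of how the client above is wired together, using the `types.Config` field names from pkg/config/config.go; the username and timeout are placeholders, not part of the patch.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"

	"github.com/go-i2p/go-github-dashboard/pkg/api"
	"github.com/go-i2p/go-github-dashboard/pkg/types"
)

func main() {
	cfg := &types.Config{
		User:        "octocat",                 // or set Organization instead
		GithubToken: os.Getenv("GITHUB_TOKEN"), // optional; authenticated requests get higher rate limits
		CacheDir:    "./.cache",
		CacheTTL:    time.Hour,
		Verbose:     true,
	}

	cache := api.NewCache(cfg)
	client := api.NewGitHubClient(cfg, cache)

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	repos, err := client.GetRepositories(ctx) // paginated and cached
	if err != nil {
		log.Fatal(err)
	}
	for _, r := range repos {
		prs, err := client.GetPullRequests(ctx, r.Owner, r.Name)
		if err != nil {
			log.Printf("skipping %s: %v", r.FullName, err)
			continue
		}
		fmt.Printf("%s: %d open pull requests\n", r.FullName, len(prs))
	}
}
```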
diff --git a/pkg/api/rss.go b/pkg/api/rss.go
new file mode 100644
index 0000000..1370640
--- /dev/null
+++ b/pkg/api/rss.go
@@ -0,0 +1,284 @@
+package api
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "strings"
+ "time"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/types"
+ "github.com/mmcdole/gofeed"
+)
+
+// RSSClient handles fetching data from GitHub RSS feeds
+type RSSClient struct {
+ parser *gofeed.Parser
+ cache *Cache
+ config *types.Config
+}
+
+// NewRSSClient creates a new RSS client
+func NewRSSClient(config *types.Config, cache *Cache) *RSSClient {
+ return &RSSClient{
+ parser: gofeed.NewParser(),
+ cache: cache,
+ config: config,
+ }
+}
+
+// GetDiscussionsFromRSS fetches recent discussions from GitHub RSS feed
+func (r *RSSClient) GetDiscussionsFromRSS(ctx context.Context, owner, repo string) ([]types.Discussion, error) {
+ var discussions []types.Discussion
+ cacheKey := fmt.Sprintf("discussions_rss_%s_%s", owner, repo)
+
+ // Try to get from cache first
+ if cachedDiscussions, found := r.cache.Get(cacheKey); found {
+ if r.config.Verbose {
+ log.Printf("Using cached discussions for %s/%s", owner, repo)
+ }
+ return cachedDiscussions.([]types.Discussion), nil
+ }
+
+ if r.config.Verbose {
+ log.Printf("Fetching discussions from RSS for %s/%s", owner, repo)
+ }
+
+ // GitHub discussions RSS feed URL
+ feedURL := fmt.Sprintf("https://github.com/%s/%s/discussions.atom", owner, repo)
+
+ feed, err := r.parser.ParseURLWithContext(feedURL, ctx)
+ if err != nil {
+ // If we can't fetch the feed, just return an empty slice rather than failing
+ if r.config.Verbose {
+ log.Printf("Error fetching discussions feed for %s/%s: %v", owner, repo, err)
+ }
+ return []types.Discussion{}, nil
+ }
+
+ cutoffDate := time.Now().AddDate(0, 0, -30) // Last 30 days
+
+ for _, item := range feed.Items {
+ // Skip items older than 30 days
+ if item.PublishedParsed != nil && item.PublishedParsed.Before(cutoffDate) {
+ continue
+ }
+
+ // Parse the author info
+ author := ""
+ authorURL := ""
+ if item.Author != nil {
+ author = item.Author.Name
+ // Extract the GitHub username from the author name if possible
+ if strings.Contains(author, "(") {
+ parts := strings.Split(author, "(")
+ if len(parts) > 1 {
+ username := strings.TrimSuffix(strings.TrimPrefix(parts[1], "@"), ")")
+ author = username
+ authorURL = fmt.Sprintf("https://github.com/%s", username)
+ }
+ }
+ }
+
+ // Parse the category
+ category := "Discussion"
+ if len(item.Categories) > 0 {
+ category = item.Categories[0]
+ }
+
+ discussion := types.Discussion{
+ Title: item.Title,
+ URL: item.Link,
+ Author: author,
+ AuthorURL: authorURL,
+ Category: category,
+ }
+
+ if item.PublishedParsed != nil {
+ discussion.CreatedAt = *item.PublishedParsed
+ }
+
+ if item.UpdatedParsed != nil {
+ discussion.LastUpdated = *item.UpdatedParsed
+ } else if item.PublishedParsed != nil {
+ discussion.LastUpdated = *item.PublishedParsed
+ }
+
+ discussions = append(discussions, discussion)
+ }
+
+ // Cache the results
+ r.cache.Set(cacheKey, discussions)
+
+ return discussions, nil
+}
+
+// GetIssuesFromRSS fetches recent issues from GitHub RSS feed
+func (r *RSSClient) GetIssuesFromRSS(ctx context.Context, owner, repo string) ([]types.Issue, error) {
+ var issues []types.Issue
+ cacheKey := fmt.Sprintf("issues_rss_%s_%s", owner, repo)
+
+ // Try to get from cache first
+ if cachedIssues, found := r.cache.Get(cacheKey); found {
+ if r.config.Verbose {
+ log.Printf("Using cached issues from RSS for %s/%s", owner, repo)
+ }
+ return cachedIssues.([]types.Issue), nil
+ }
+
+ if r.config.Verbose {
+ log.Printf("Fetching issues from RSS for %s/%s", owner, repo)
+ }
+
+ // GitHub issues RSS feed URL
+ feedURL := fmt.Sprintf("https://github.com/%s/%s/issues.atom", owner, repo)
+
+ feed, err := r.parser.ParseURLWithContext(feedURL, ctx)
+ if err != nil {
+ // If we can't fetch the feed, just return an empty slice
+ if r.config.Verbose {
+ log.Printf("Error fetching issues feed for %s/%s: %v", owner, repo, err)
+ }
+ return []types.Issue{}, nil
+ }
+
+ for _, item := range feed.Items {
+ // Skip pull requests (they appear in the issues feed)
+ if strings.Contains(item.Link, "/pull/") {
+ continue
+ }
+
+ // Parse the issue number from the URL
+ number := 0
+ parts := strings.Split(item.Link, "/issues/")
+ if len(parts) > 1 {
+ fmt.Sscanf(parts[1], "%d", &number)
+ }
+
+ // Parse the author info
+ author := ""
+ authorURL := ""
+ if item.Author != nil {
+ author = item.Author.Name
+ // Extract the GitHub username from the author name if possible
+ if strings.Contains(author, "(") {
+ parts := strings.Split(author, "(")
+ if len(parts) > 1 {
+ username := strings.TrimSuffix(strings.TrimPrefix(parts[1], "@"), ")")
+ author = username
+ authorURL = fmt.Sprintf("https://github.com/%s", username)
+ }
+ }
+ }
+
+ issue := types.Issue{
+ Number: number,
+ Title: item.Title,
+ URL: item.Link,
+ Author: author,
+ AuthorURL: authorURL,
+ }
+
+ if item.PublishedParsed != nil {
+ issue.CreatedAt = *item.PublishedParsed
+ }
+
+ if item.UpdatedParsed != nil {
+ issue.UpdatedAt = *item.UpdatedParsed
+ } else if item.PublishedParsed != nil {
+ issue.UpdatedAt = *item.PublishedParsed
+ }
+
+ // Note: RSS doesn't include labels, so we'll have an empty labels slice
+
+ issues = append(issues, issue)
+ }
+
+ // Cache the results
+ r.cache.Set(cacheKey, issues)
+
+ return issues, nil
+}
+
+// GetPullRequestsFromRSS fetches recent pull requests from GitHub RSS feed
+func (r *RSSClient) GetPullRequestsFromRSS(ctx context.Context, owner, repo string) ([]types.PullRequest, error) {
+ var pullRequests []types.PullRequest
+ cacheKey := fmt.Sprintf("prs_rss_%s_%s", owner, repo)
+
+ // Try to get from cache first
+ if cachedPRs, found := r.cache.Get(cacheKey); found {
+ if r.config.Verbose {
+ log.Printf("Using cached pull requests from RSS for %s/%s", owner, repo)
+ }
+ return cachedPRs.([]types.PullRequest), nil
+ }
+
+ if r.config.Verbose {
+ log.Printf("Fetching pull requests from RSS for %s/%s", owner, repo)
+ }
+
+ // GitHub pull requests RSS feed URL
+ feedURL := fmt.Sprintf("https://github.com/%s/%s/pulls.atom", owner, repo)
+
+ feed, err := r.parser.ParseURLWithContext(feedURL, ctx)
+ if err != nil {
+ // If we can't fetch the feed, just return an empty slice
+ if r.config.Verbose {
+ log.Printf("Error fetching pull requests feed for %s/%s: %v", owner, repo, err)
+ }
+ return []types.PullRequest{}, nil
+ }
+
+ for _, item := range feed.Items {
+ // Parse the PR number from the URL
+ number := 0
+ parts := strings.Split(item.Link, "/pull/")
+ if len(parts) > 1 {
+ fmt.Sscanf(parts[1], "%d", &number)
+ }
+
+ // Parse the author info
+ author := ""
+ authorURL := ""
+ if item.Author != nil {
+ author = item.Author.Name
+ // Extract the GitHub username from the author name if possible
+ if strings.Contains(author, "(") {
+ parts := strings.Split(author, "(")
+ if len(parts) > 1 {
+ username := strings.TrimSuffix(strings.TrimPrefix(parts[1], "@"), ")")
+ author = username
+ authorURL = fmt.Sprintf("https://github.com/%s", username)
+ }
+ }
+ }
+
+ pr := types.PullRequest{
+ Number: number,
+ Title: item.Title,
+ URL: item.Link,
+ Author: author,
+ AuthorURL: authorURL,
+ Status: "open", // All items in the feed are open
+ }
+
+ if item.PublishedParsed != nil {
+ pr.CreatedAt = *item.PublishedParsed
+ }
+
+ if item.UpdatedParsed != nil {
+ pr.UpdatedAt = *item.UpdatedParsed
+ } else if item.PublishedParsed != nil {
+ pr.UpdatedAt = *item.PublishedParsed
+ }
+
+ // Note: RSS doesn't include labels, so we'll have an empty labels slice
+
+ pullRequests = append(pullRequests, pr)
+ }
+
+ // Cache the results
+ r.cache.Set(cacheKey, pullRequests)
+
+ return pullRequests, nil
+}
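
The three feed readers above share the same author-name handling. Below is a standalone sketch of that parsing, assuming feed authors arrive as "Display Name (@login)"; that format is an assumption about the GitHub Atom feeds, and plain names pass through unchanged.

```go
package main

import (
	"fmt"
	"strings"
)

// splitAuthor mirrors the feed-author handling above: gofeed exposes a single
// name string, and when it looks like "Display Name (@login)" the login is
// extracted and turned into a profile URL.
func splitAuthor(name string) (author, authorURL string) {
	author = name
	if strings.Contains(name, "(") {
		parts := strings.Split(name, "(")
		if len(parts) > 1 {
			username := strings.TrimSuffix(strings.TrimPrefix(parts[1], "@"), ")")
			author = username
			authorURL = "https://github.com/" + username
		}
	}
	return author, authorURL
}

func main() {
	fmt.Println(splitAuthor("Jane Doe (@janedoe)")) // janedoe https://github.com/janedoe
	fmt.Println(splitAuthor("janedoe"))             // janedoe (no URL)
}
```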
diff --git a/pkg/cmd/generate.go b/pkg/cmd/generate.go
new file mode 100644
index 0000000..101a608
--- /dev/null
+++ b/pkg/cmd/generate.go
@@ -0,0 +1,219 @@
+// pkg/cmd/generate.go
+package cmd
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "os"
+ "os/signal"
+ "path/filepath"
+ "sync"
+ "syscall"
+ "time"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/api"
+ "github.com/go-i2p/go-github-dashboard/pkg/config"
+ "github.com/go-i2p/go-github-dashboard/pkg/generator"
+ "github.com/go-i2p/go-github-dashboard/pkg/types"
+ "github.com/spf13/cobra"
+)
+
+var generateCmd = &cobra.Command{
+ Use: "generate",
+ Short: "Generate the GitHub dashboard",
+ Long: `Fetches GitHub repository data and generates a static dashboard.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return runGenerate()
+ },
+}
+
+func init() {
+ rootCmd.AddCommand(generateCmd)
+}
+
+func runGenerate() error {
+ // Get configuration
+ cfg, err := config.GetConfig()
+ if err != nil {
+ return fmt.Errorf("error with configuration: %w", err)
+ }
+
+ // Create a context with cancellation
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ // Handle interruptions gracefully
+ signalChan := make(chan os.Signal, 1)
+ signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM)
+ go func() {
+ <-signalChan
+ fmt.Println("\nReceived interrupt signal, shutting down gracefully...")
+ cancel()
+ }()
+
+ // Initialize the cache
+ cache := api.NewCache(cfg)
+
+ // Initialize clients
+ githubClient := api.NewGitHubClient(cfg, cache)
+ rssClient := api.NewRSSClient(cfg, cache)
+
+ // Initialize generators
+ mdGenerator, err := generator.NewMarkdownGenerator(cfg)
+ if err != nil {
+ return fmt.Errorf("error creating markdown generator: %w", err)
+ }
+
+ htmlGenerator, err := generator.NewHTMLGenerator(cfg)
+ if err != nil {
+ return fmt.Errorf("error creating HTML generator: %w", err)
+ }
+
+ // Create dashboard data structure
+ dashboard := types.Dashboard{
+ Username: cfg.User,
+ Organization: cfg.Organization,
+ GeneratedAt: time.Now(),
+ }
+
+ // Fetch repositories
+ fmt.Println("Fetching repositories...")
+ repositories, err := githubClient.GetRepositories(ctx)
+ if err != nil {
+ return fmt.Errorf("error fetching repositories: %w", err)
+ }
+
+ fmt.Printf("Found %d repositories\n", len(repositories))
+
+ // Create a wait group for parallel processing
+ var wg sync.WaitGroup
+ reposChan := make(chan types.Repository, len(repositories))
+
+ // Process each repository in parallel
+ for i := range repositories {
+ wg.Add(1)
+ go func(repo types.Repository) {
+ defer wg.Done()
+
+ // Check if context is canceled
+ if ctx.Err() != nil {
+ return
+ }
+
+ owner := repo.Owner
+ repoName := repo.Name
+
+ fmt.Printf("Processing repository: %s/%s\n", owner, repoName)
+
+ // Fetch pull requests from RSS first, fall back to API
+ pullRequests, err := rssClient.GetPullRequestsFromRSS(ctx, owner, repoName)
+ if err != nil || len(pullRequests) == 0 {
+ if cfg.Verbose && err != nil {
+ log.Printf("Error fetching pull requests from RSS for %s/%s: %v, falling back to API", owner, repoName, err)
+ }
+ pullRequests, err = githubClient.GetPullRequests(ctx, owner, repoName)
+ if err != nil {
+ log.Printf("Error fetching pull requests for %s/%s: %v", owner, repoName, err)
+ }
+ }
+
+ // Fetch issues from RSS first, fall back to API
+ issues, err := rssClient.GetIssuesFromRSS(ctx, owner, repoName)
+ if err != nil || len(issues) == 0 {
+ if cfg.Verbose && err != nil {
+ log.Printf("Error fetching issues from RSS for %s/%s: %v, falling back to API", owner, repoName, err)
+ }
+ issues, err = githubClient.GetIssues(ctx, owner, repoName)
+ if err != nil {
+ log.Printf("Error fetching issues for %s/%s: %v", owner, repoName, err)
+ }
+ }
+
+ // Fetch discussions (only available through RSS)
+ discussions, err := rssClient.GetDiscussionsFromRSS(ctx, owner, repoName)
+ if err != nil {
+ log.Printf("Error fetching discussions for %s/%s: %v", owner, repoName, err)
+ }
+
+ // Update the repository with the fetched data
+ repo.PullRequests = pullRequests
+ repo.Issues = issues
+ repo.Discussions = discussions
+
+ // Send the updated repository to the channel
+ reposChan <- repo
+ }(repositories[i])
+ }
+
+ // Close the channel when all goroutines are done
+ go func() {
+ wg.Wait()
+ close(reposChan)
+ }()
+
+ // Collect results
+ var processedRepos []types.Repository
+ for repo := range reposChan {
+ processedRepos = append(processedRepos, repo)
+ }
+
+ // Sort repositories by name so the output is deterministic regardless of
+ // the order in which the goroutines finish
+ sort.Slice(processedRepos, func(i, j int) bool {
+ return processedRepos[i].Name < processedRepos[j].Name
+ })
+ dashboard.Repositories = processedRepos
+
+ // Count totals
+ for _, repo := range dashboard.Repositories {
+ dashboard.TotalPRs += len(repo.PullRequests)
+ dashboard.TotalIssues += len(repo.Issues)
+ dashboard.TotalDiscussions += len(repo.Discussions)
+ }
+
+ // Generate markdown files
+ fmt.Println("Generating markdown files...")
+ markdownPaths, err := mdGenerator.GenerateAllRepositoriesMarkdown(dashboard)
+ if err != nil {
+ return fmt.Errorf("error generating markdown files: %w", err)
+ }
+
+ // Convert markdown to HTML
+ fmt.Println("Converting markdown to HTML...")
+ _, err = htmlGenerator.ConvertAllMarkdownToHTML(markdownPaths)
+ if err != nil {
+ return fmt.Errorf("error converting markdown to HTML: %w", err)
+ }
+
+ // Generate the main HTML dashboard
+ fmt.Println("Generating HTML dashboard...")
+ err = htmlGenerator.GenerateHTML(dashboard)
+ if err != nil {
+ return fmt.Errorf("error generating HTML dashboard: %w", err)
+ }
+
+ // Create a README in the output directory
+ readmePath := filepath.Join(cfg.OutputDir, "README.md")
+ var targetName string
+ if cfg.User != "" {
+ targetName = "@" + cfg.User
+ } else {
+ targetName = cfg.Organization
+ }
+
+ readme := fmt.Sprintf("# GitHub Dashboard\n\nThis dashboard was generated for %s on %s.\n\n",
+ targetName,
+ dashboard.GeneratedAt.Format("January 2, 2006"))
+ readme += fmt.Sprintf("- Total repositories: %d\n", len(dashboard.Repositories))
+ readme += fmt.Sprintf("- Total open pull requests: %d\n", dashboard.TotalPRs)
+ readme += fmt.Sprintf("- Total open issues: %d\n", dashboard.TotalIssues)
+ readme += fmt.Sprintf("- Total recent discussions: %d\n\n", dashboard.TotalDiscussions)
+ readme += "To view the dashboard, open `index.html` in your browser.\n"
+
+ err = os.WriteFile(readmePath, []byte(readme), 0644)
+ if err != nil {
+ log.Printf("Error writing README file: %v", err)
+ }
+
+ fmt.Printf("\nDashboard generated successfully in %s\n", cfg.OutputDir)
+ fmt.Printf("Open %s/index.html in your browser to view the dashboard\n", cfg.OutputDir)
+
+ return nil
+}
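
generate.go starts one goroutine per repository, which is fine for small accounts but can hammer the API for large ones. A common variant is to cap concurrency with a semaphore; the sketch below shows that pattern only as an illustration, it is not what the command does.

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	repos := []string{"repo-a", "repo-b", "repo-c", "repo-d"}

	const maxWorkers = 2
	sem := make(chan struct{}, maxWorkers) // at most maxWorkers fetches in flight
	var wg sync.WaitGroup

	for _, name := range repos {
		wg.Add(1)
		go func(name string) {
			defer wg.Done()
			sem <- struct{}{}        // acquire a slot
			defer func() { <-sem }() // release it when done

			// fetch pull requests, issues and discussions for "name" here
			fmt.Println("processing", name)
		}(name)
	}
	wg.Wait()
}
```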
diff --git a/pkg/cmd/main.go b/pkg/cmd/main.go
new file mode 100644
index 0000000..49d1d3b
--- /dev/null
+++ b/pkg/cmd/main.go
@@ -0,0 +1,53 @@
+// pkg/cmd/main.go
+package cmd
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/config"
+ "github.com/spf13/cobra"
+ "github.com/spf13/viper"
+)
+
+var rootCmd = &cobra.Command{
+ Use: "go-github-dashboard",
+ Short: "Generate a static GitHub dashboard",
+ Long: `A pure Go command-line application that generates a static
+GitHub dashboard by aggregating repository data from GitHub API
+and RSS feeds, organizing content in a repository-by-repository structure.`,
+ Run: func(cmd *cobra.Command, args []string) {
+ // The root command will just show help
+ cmd.Help()
+ },
+}
+
+// Execute executes the root command
+func Execute() {
+ if err := rootCmd.Execute(); err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+ }
+}
+
+func init() {
+ cobra.OnInitialize(config.InitConfig)
+
+ // Persistent flags for all commands
+ rootCmd.PersistentFlags().StringP("user", "u", "", "GitHub username to generate dashboard for")
+ rootCmd.PersistentFlags().StringP("org", "o", "", "GitHub organization to generate dashboard for")
+ rootCmd.PersistentFlags().StringP("output", "d", "./dashboard", "Output directory for the dashboard")
+ rootCmd.PersistentFlags().StringP("token", "t", "", "GitHub API token (optional, increases rate limits)")
+ rootCmd.PersistentFlags().String("cache-dir", "./.cache", "Directory for caching API responses")
+ rootCmd.PersistentFlags().String("cache-ttl", "1h", "Cache time-to-live duration (e.g., 1h, 30m)")
+ rootCmd.PersistentFlags().BoolP("verbose", "v", false, "Enable verbose output")
+
+ // Bind flags to viper
+ viper.BindPFlag("user", rootCmd.PersistentFlags().Lookup("user"))
+ viper.BindPFlag("org", rootCmd.PersistentFlags().Lookup("org"))
+ viper.BindPFlag("output", rootCmd.PersistentFlags().Lookup("output"))
+ viper.BindPFlag("token", rootCmd.PersistentFlags().Lookup("token"))
+ viper.BindPFlag("cache-dir", rootCmd.PersistentFlags().Lookup("cache-dir"))
+ viper.BindPFlag("cache-ttl", rootCmd.PersistentFlags().Lookup("cache-ttl"))
+ viper.BindPFlag("verbose", rootCmd.PersistentFlags().Lookup("verbose"))
+}
diff --git a/pkg/cmd/version.go b/pkg/cmd/version.go
new file mode 100644
index 0000000..b564f51
--- /dev/null
+++ b/pkg/cmd/version.go
@@ -0,0 +1,30 @@
+// pkg/cmd/version.go
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+)
+
+// Version information
+var (
+ Version = "0.1.0"
+ BuildDate = "unknown"
+ Commit = "unknown"
+)
+
+var versionCmd = &cobra.Command{
+ Use: "version",
+ Short: "Print the version number",
+ Long: `Print the version, build date, and commit hash.`,
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("go-github-dashboard version %s\n", Version)
+ fmt.Printf("Build date: %s\n", BuildDate)
+ fmt.Printf("Commit: %s\n", Commit)
+ },
+}
+
+func init() {
+ rootCmd.AddCommand(versionCmd)
+}
diff --git a/pkg/config/config.go b/pkg/config/config.go
new file mode 100644
index 0000000..0cacf7b
--- /dev/null
+++ b/pkg/config/config.go
@@ -0,0 +1,76 @@
+package config
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "time"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/types"
+ "github.com/spf13/viper"
+)
+
+// InitConfig initializes the Viper configuration
+func InitConfig() {
+ // Set default values
+ viper.SetDefault("output", "./dashboard")
+ viper.SetDefault("cache-dir", "./.cache")
+ viper.SetDefault("cache-ttl", "1h")
+ viper.SetDefault("verbose", false)
+
+ // Environment variables
+ viper.SetEnvPrefix("GITHUB_DASHBOARD") // will convert to GITHUB_DASHBOARD_*
+ viper.AutomaticEnv()
+
+ // Check for token in environment
+ if token := os.Getenv("GITHUB_TOKEN"); token != "" && viper.GetString("token") == "" {
+ viper.Set("token", token)
+ }
+}
+
+// GetConfig builds and validates the configuration from Viper
+func GetConfig() (*types.Config, error) {
+ cacheTTL, err := time.ParseDuration(viper.GetString("cache-ttl"))
+ if err != nil {
+ return nil, errors.New("invalid cache-ttl format: use a valid duration string (e.g., 1h, 30m)")
+ }
+
+ config := &types.Config{
+ User: viper.GetString("user"),
+ Organization: viper.GetString("org"),
+ OutputDir: viper.GetString("output"),
+ GithubToken: viper.GetString("token"),
+ CacheDir: viper.GetString("cache-dir"),
+ CacheTTL: cacheTTL,
+ Verbose: viper.GetBool("verbose"),
+ }
+
+ // Validate config
+ if config.User == "" && config.Organization == "" {
+ return nil, errors.New("either user or organization must be specified")
+ }
+
+ if config.User != "" && config.Organization != "" {
+ return nil, errors.New("only one of user or organization can be specified")
+ }
+
+ // Create output directory if it doesn't exist
+ err = os.MkdirAll(config.OutputDir, 0755)
+ if err != nil {
+ return nil, err
+ }
+
+ // Create repositories directory
+ err = os.MkdirAll(filepath.Join(config.OutputDir, "repositories"), 0755)
+ if err != nil {
+ return nil, err
+ }
+
+ // Create cache directory if it doesn't exist
+ err = os.MkdirAll(config.CacheDir, 0755)
+ if err != nil {
+ return nil, err
+ }
+
+ return config, nil
+}
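
A sketch of driving GetConfig directly, bypassing the cobra flag binding. The viper values set here are placeholders; note that GetConfig also creates the output, repositories, and cache directories as a side effect.

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-i2p/go-github-dashboard/pkg/config"
	"github.com/spf13/viper"
)

func main() {
	// cobra normally binds the CLI flags to these viper keys; setting them
	// directly here just exercises the validation and directory setup.
	config.InitConfig()
	viper.Set("user", "octocat")
	viper.Set("cache-ttl", "30m")

	cfg, err := config.GetConfig() // errors if both or neither of user/org are set
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("dashboard for %s -> %s (cache TTL %s)\n", cfg.User, cfg.OutputDir, cfg.CacheTTL)
}
```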
diff --git a/pkg/generator/html.go b/pkg/generator/html.go
new file mode 100644
index 0000000..fd75333
--- /dev/null
+++ b/pkg/generator/html.go
@@ -0,0 +1,558 @@
+package generator
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/types"
+ "github.com/russross/blackfriday/v2"
+)
+
+// HTMLGenerator handles the generation of HTML files
+type HTMLGenerator struct {
+ outputDir string
+ template *template.Template
+ verbose bool
+}
+
+// NewHTMLGenerator creates a new HTMLGenerator
+func NewHTMLGenerator(config *types.Config) (*HTMLGenerator, error) {
+ // Create the template
+ indexTmpl := `<!DOCTYPE html>
+<html lang="en">
+<head>
+  <meta charset="UTF-8">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  <title>GitHub Dashboard {{if .Username}}for @{{.Username}}{{else}}for {{.Organization}}{{end}}</title>
+  <link rel="stylesheet" href="style.css">
+</head>
+<body>
+  <header>
+    <h1>GitHub Dashboard {{if .Username}}for @{{.Username}}{{else}}for {{.Organization}}{{end}}</h1>
+    <div class="dashboard-stats">
+      <span>{{len .Repositories}} repositories</span>
+      <span>{{.TotalPRs}} open pull requests</span>
+      <span>{{.TotalIssues}} open issues</span>
+      <span>{{.TotalDiscussions}} recent discussions</span>
+    </div>
+    <p class="generated-at">Generated on {{.GeneratedAt.Format "January 2, 2006 at 15:04"}}</p>
+  </header>
+
+  <main>
+    <section class="repositories">
+      <h2>Repositories</h2>
+
+      {{range .Repositories}}
+      <div class="repository collapsible">
+        <input type="checkbox" id="repo-{{.Name}}" class="toggle">
+        <label for="repo-{{.Name}}" class="toggle-label">
+          <span class="repo-name"><a href="{{.URL}}">{{.FullName}}</a></span>
+          <span class="repo-stats">
+            <span class="stat">{{.Stars}} stars</span>
+            <span class="stat">{{.Forks}} forks</span>
+          </span>
+        </label>
+        <div class="collapsible-content">
+          <div class="repo-details">
+            <p class="repo-description">{{if .Description}}{{.Description}}{{else}}No description provided.{{end}}</p>
+            <div class="repo-meta">
+              <span>Last updated: {{.LastUpdated.Format "2006-01-02"}}</span>
+            </div>
+          </div>
+
+          {{if .PullRequests}}
+          <div class="collapsible">
+            <input type="checkbox" id="prs-{{.Name}}" class="toggle">
+            <label for="prs-{{.Name}}" class="toggle-label section-label pr-label">Open Pull Requests ({{len .PullRequests}})</label>
+            <div class="collapsible-content">
+              <table class="data-table">
+                <thead>
+                  <tr>
+                    <th>Title</th>
+                    <th>Author</th>
+                    <th>Updated</th>
+                    <th>Labels</th>
+                  </tr>
+                </thead>
+                <tbody>
+                  {{range .PullRequests}}
+                  <tr>
+                    <td><a href="{{.URL}}">{{.Title}}</a></td>
+                    <td><a href="{{.AuthorURL}}">@{{.Author}}</a></td>
+                    <td>{{.UpdatedAt.Format "2006-01-02"}}</td>
+                    <td>{{range $i, $label := .Labels}}{{if $i}}, {{end}}{{$label.Name}}{{else}}none{{end}}</td>
+                  </tr>
+                  {{end}}
+                </tbody>
+              </table>
+            </div>
+          </div>
+          {{end}}
+
+          {{if .Issues}}
+          <div class="collapsible">
+            <input type="checkbox" id="issues-{{.Name}}" class="toggle">
+            <label for="issues-{{.Name}}" class="toggle-label section-label issue-label">Open Issues ({{len .Issues}})</label>
+            <div class="collapsible-content">
+              <table class="data-table">
+                <thead>
+                  <tr>
+                    <th>Title</th>
+                    <th>Author</th>
+                    <th>Updated</th>
+                    <th>Labels</th>
+                  </tr>
+                </thead>
+                <tbody>
+                  {{range .Issues}}
+                  <tr>
+                    <td><a href="{{.URL}}">{{.Title}}</a></td>
+                    <td><a href="{{.AuthorURL}}">@{{.Author}}</a></td>
+                    <td>{{.UpdatedAt.Format "2006-01-02"}}</td>
+                    <td>{{range $i, $label := .Labels}}{{if $i}}, {{end}}{{$label.Name}}{{else}}none{{end}}</td>
+                  </tr>
+                  {{end}}
+                </tbody>
+              </table>
+            </div>
+          </div>
+          {{end}}
+
+          {{if .Discussions}}
+          <div class="collapsible">
+            <input type="checkbox" id="discussions-{{.Name}}" class="toggle">
+            <label for="discussions-{{.Name}}" class="toggle-label section-label discussion-label">Recent Discussions ({{len .Discussions}})</label>
+            <div class="collapsible-content">
+              <table class="data-table">
+                <thead>
+                  <tr>
+                    <th>Title</th>
+                    <th>Started By</th>
+                    <th>Last Activity</th>
+                    <th>Category</th>
+                  </tr>
+                </thead>
+                <tbody>
+                  {{range .Discussions}}
+                  <tr>
+                    <td><a href="{{.URL}}">{{.Title}}</a></td>
+                    <td><a href="{{.AuthorURL}}">@{{.Author}}</a></td>
+                    <td>{{.LastUpdated.Format "2006-01-02"}}</td>
+                    <td>{{.Category}}</td>
+                  </tr>
+                  {{end}}
+                </tbody>
+              </table>
+            </div>
+          </div>
+          {{end}}
+
+          <div class="repo-links">
+            <a href="repositories/{{.Name}}.html">Repository detail page</a> |
+            <a href="{{.URL}}">View on GitHub</a>
+          </div>
+        </div>
+      </div>
+      {{end}}
+    </section>
+  </main>
+
+  <footer>
+    <p>Generated by go-github-dashboard</p>
+  </footer>
+</body>
+</html>
+`
+
+ tmpl, err := template.New("index.html").Parse(indexTmpl)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing HTML template: %w", err)
+ }
+
+ return &HTMLGenerator{
+ outputDir: config.OutputDir,
+ template: tmpl,
+ verbose: config.Verbose,
+ }, nil
+}
+
+// GenerateCSS generates the CSS file for the dashboard
+func (g *HTMLGenerator) GenerateCSS() error {
+ css := `/* Base styles */
+:root {
+ --primary-color: #0366d6;
+ --secondary-color: #586069;
+ --background-color: #ffffff;
+ --border-color: #e1e4e8;
+ --pr-color: #28a745;
+ --issue-color: #d73a49;
+ --discussion-color: #6f42c1;
+ --hover-color: #f6f8fa;
+ --font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif;
+}
+
+* {
+ box-sizing: border-box;
+ margin: 0;
+ padding: 0;
+}
+
+body {
+ font-family: var(--font-family);
+ line-height: 1.5;
+ color: #24292e;
+ background-color: var(--background-color);
+ padding: 20px;
+ max-width: 1200px;
+ margin: 0 auto;
+}
+
+/* Header styles */
+header {
+ margin-bottom: 30px;
+ padding-bottom: 20px;
+ border-bottom: 1px solid var(--border-color);
+}
+
+header h1 {
+ margin-bottom: 10px;
+}
+
+.dashboard-stats {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 15px;
+ margin-bottom: 10px;
+}
+
+.dashboard-stats span {
+ background-color: #f1f8ff;
+ border-radius: 20px;
+ padding: 5px 12px;
+ font-size: 14px;
+}
+
+.generated-at {
+ font-size: 14px;
+ color: var(--secondary-color);
+}
+
+/* Repository styles */
+.repositories {
+ margin-bottom: 30px;
+}
+
+.repositories h2 {
+ margin-bottom: 20px;
+}
+
+.repository {
+ margin-bottom: 15px;
+ border: 1px solid var(--border-color);
+ border-radius: 6px;
+ overflow: hidden;
+}
+
+.repo-details {
+ padding: 15px;
+ border-bottom: 1px solid var(--border-color);
+}
+
+.repo-description {
+ margin-bottom: 10px;
+}
+
+.repo-meta {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 15px;
+ font-size: 14px;
+ color: var(--secondary-color);
+}
+
+.repo-links {
+ padding: 10px 15px;
+ font-size: 14px;
+ border-top: 1px solid var(--border-color);
+}
+
+/* Collapsible sections */
+.collapsible {
+ width: 100%;
+}
+
+.toggle {
+ position: absolute;
+ opacity: 0;
+ z-index: -1;
+}
+
+.toggle-label {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: 12px 15px;
+ font-weight: 600;
+ cursor: pointer;
+ background-color: #f6f8fa;
+ position: relative;
+}
+
+.section-label {
+ border-top: 1px solid var(--border-color);
+ font-weight: 500;
+}
+
+.pr-label {
+ color: var(--pr-color);
+}
+
+.issue-label {
+ color: var(--issue-color);
+}
+
+.discussion-label {
+ color: var(--discussion-color);
+}
+
+.toggle-label::after {
+ content: '+';
+ font-size: 18px;
+ transition: transform 0.3s ease;
+}
+
+.toggle:checked ~ .toggle-label::after {
+ content: '−';
+}
+
+.collapsible-content {
+ max-height: 0;
+ overflow: hidden;
+ transition: max-height 0.35s ease;
+}
+
+.toggle:checked ~ .collapsible-content {
+ max-height: 100vh;
+}
+
+/* Table styles */
+.data-table {
+ width: 100%;
+ border-collapse: collapse;
+ font-size: 14px;
+}
+
+.data-table th,
+.data-table td {
+ padding: 8px 15px;
+ text-align: left;
+ border-bottom: 1px solid var(--border-color);
+}
+
+.data-table th {
+ background-color: #f6f8fa;
+ font-weight: 600;
+}
+
+.data-table tr:hover {
+ background-color: var(--hover-color);
+}
+
+/* Links */
+a {
+ color: var(--primary-color);
+ text-decoration: none;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+/* Repository name and stats */
+.repo-name {
+ font-size: 16px;
+}
+
+.repo-stats {
+ display: flex;
+ gap: 10px;
+}
+
+.stat {
+ font-size: 12px;
+ padding: 2px 8px;
+ border-radius: 12px;
+ background-color: #f1f8ff;
+ color: var(--primary-color);
+}
+
+/* Footer */
+footer {
+ margin-top: 40px;
+ padding-top: 20px;
+ border-top: 1px solid var(--border-color);
+ font-size: 14px;
+ color: var(--secondary-color);
+ text-align: center;
+}
+
+/* Responsive adjustments */
+@media (max-width: 768px) {
+ .toggle-label {
+ flex-direction: column;
+ align-items: flex-start;
+ gap: 5px;
+ }
+
+ .repo-stats {
+ align-self: flex-start;
+ }
+
+ .data-table {
+ display: block;
+ overflow-x: auto;
+ }
+
+ .dashboard-stats {
+ flex-direction: column;
+ align-items: flex-start;
+ gap: 5px;
+ }
+}`
+
+ err := os.WriteFile(filepath.Join(g.outputDir, "style.css"), []byte(css), 0644)
+ if err != nil {
+ return fmt.Errorf("error writing CSS file: %w", err)
+ }
+
+ return nil
+}
+
+// GenerateHTML generates the main HTML dashboard
+func (g *HTMLGenerator) GenerateHTML(dashboard types.Dashboard) error {
+ if g.verbose {
+ log.Println("Generating HTML dashboard")
+ }
+
+ // Render the template
+ var buf bytes.Buffer
+ err := g.template.Execute(&buf, dashboard)
+ if err != nil {
+ return fmt.Errorf("error executing template: %w", err)
+ }
+
+ // Write the file
+ outputPath := filepath.Join(g.outputDir, "index.html")
+ err = os.WriteFile(outputPath, buf.Bytes(), 0644)
+ if err != nil {
+ return fmt.Errorf("error writing HTML file: %w", err)
+ }
+
+ // Generate the CSS file
+ err = g.GenerateCSS()
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// ConvertMarkdownToHTML converts a markdown file to HTML
+func (g *HTMLGenerator) ConvertMarkdownToHTML(markdownPath string) (string, error) {
+ if g.verbose {
+ log.Printf("Converting markdown to HTML: %s", markdownPath)
+ }
+
+ // Read the markdown file
+ markdownContent, err := os.ReadFile(markdownPath)
+ if err != nil {
+ return "", fmt.Errorf("error reading markdown file: %w", err)
+ }
+
+ // Convert the markdown to HTML
+ htmlContent := blackfriday.Run(markdownContent)
+
+ // Determine the output filename
+ baseName := filepath.Base(markdownPath)
+ htmlFileName := strings.TrimSuffix(baseName, filepath.Ext(baseName)) + ".html"
+ htmlPath := filepath.Join(g.outputDir, "repositories", htmlFileName)
+
+ // Create a simple HTML wrapper
+ htmlPage := fmt.Sprintf(`<!DOCTYPE html>
+<html lang="en">
+<head>
+  <meta charset="UTF-8">
+  <title>%s</title>
+  <link rel="stylesheet" href="../style.css">
+</head>
+<body>
+  <p><a href="../index.html">← Back to Dashboard</a></p>
+  <main>
+%s
+  </main>
+</body>
+</html>
+`, strings.TrimSuffix(baseName, filepath.Ext(baseName)), string(htmlContent))
+
+ // Write the HTML file
+ err = os.WriteFile(htmlPath, []byte(htmlPage), 0644)
+ if err != nil {
+ return "", fmt.Errorf("error writing HTML file: %w", err)
+ }
+
+ return htmlPath, nil
+}
+
+// ConvertAllMarkdownToHTML converts all markdown files to HTML
+func (g *HTMLGenerator) ConvertAllMarkdownToHTML(markdownPaths []string) ([]string, error) {
+ var htmlPaths []string
+
+ for _, markdownPath := range markdownPaths {
+ htmlPath, err := g.ConvertMarkdownToHTML(markdownPath)
+ if err != nil {
+ return htmlPaths, err
+ }
+ htmlPaths = append(htmlPaths, htmlPath)
+ }
+
+ return htmlPaths, nil
+}
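
For reference, a standalone sketch of the blackfriday conversion used by ConvertMarkdownToHTML; the sample markdown is a placeholder. blackfriday.Run applies the library's common extensions, which include table support, so the pipe tables produced by the markdown generator come out as HTML tables.

```go
package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	md := []byte("# Repository: demo\n\n| Title | Author |\n|-------|--------|\n| Fix parser | octocat |\n")

	// Run converts the markdown to an HTML fragment; the generator wraps
	// this fragment in a page shell and writes it under repositories/.
	html := blackfriday.Run(md)
	fmt.Println(string(html))
}
```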
diff --git a/pkg/generator/markdown.go b/pkg/generator/markdown.go
new file mode 100644
index 0000000..68fabdf
--- /dev/null
+++ b/pkg/generator/markdown.go
@@ -0,0 +1,121 @@
+package generator
+
+import (
+ "bytes"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "text/template"
+ "time"
+
+ "github.com/go-i2p/go-github-dashboard/pkg/types"
+)
+
+// MarkdownGenerator handles the generation of markdown files
+type MarkdownGenerator struct {
+ outputDir string
+ template *template.Template
+ verbose bool
+}
+
+// NewMarkdownGenerator creates a new MarkdownGenerator
+func NewMarkdownGenerator(config *types.Config) (*MarkdownGenerator, error) {
+ // Create the template
+ tmpl, err := template.New("repository.md.tmpl").Parse(`# Repository: {{.Name}}
+
+{{if .Description}}{{.Description}}{{else}}No description provided.{{end}}
+
+## Open Pull Requests
+
+{{if .PullRequests}}
+| Title | Author | Updated | Labels |
+|-------|--------|---------|--------|
+{{range .PullRequests}}| [{{.Title}}]({{.URL}}) | [{{.Author}}]({{.AuthorURL}}) | {{.UpdatedAt.Format "2006-01-02"}} | {{range $i, $label := .Labels}}{{if $i}}, {{end}}{{$label.Name}}{{else}}*none*{{end}} |
+{{end}}
+{{else}}
+*No open pull requests*
+{{end}}
+
+## Open Issues
+
+{{if .Issues}}
+| Title | Author | Updated | Labels |
+|-------|--------|---------|--------|
+{{range .Issues}}| [{{.Title}}]({{.URL}}) | [{{.Author}}]({{.AuthorURL}}) | {{.UpdatedAt.Format "2006-01-02"}} | {{range $i, $label := .Labels}}{{if $i}}, {{end}}{{$label.Name}}{{else}}*none*{{end}} |
+{{end}}
+{{else}}
+*No open issues*
+{{end}}
+
+## Recent Discussions
+
+{{if .Discussions}}
+| Title | Started By | Last Activity | Category |
+|-------|------------|---------------|----------|
+{{range .Discussions}}| [{{.Title}}]({{.URL}}) | [{{.Author}}]({{.AuthorURL}}) | {{.LastUpdated.Format "2006-01-02"}} | {{.Category}} |
+{{end}}
+{{else}}
+*No recent discussions*
+{{end}}
+
+---
+*Generated at {{.GeneratedAt.Format "2006-01-02 15:04:05"}}*
+`)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing markdown template: %w", err)
+ }
+
+ return &MarkdownGenerator{
+ outputDir: config.OutputDir,
+ template: tmpl,
+ verbose: config.Verbose,
+ }, nil
+}
+
+// GenerateRepositoryMarkdown generates a markdown file for a repository
+func (g *MarkdownGenerator) GenerateRepositoryMarkdown(repo types.Repository) (string, error) {
+ if g.verbose {
+ log.Printf("Generating markdown for repository %s", repo.FullName)
+ }
+
+ // Prepare the template data
+ data := struct {
+ types.Repository
+ GeneratedAt time.Time
+ }{
+ Repository: repo,
+ GeneratedAt: time.Now(),
+ }
+
+ // Render the template
+ var buf bytes.Buffer
+ err := g.template.Execute(&buf, data)
+ if err != nil {
+ return "", fmt.Errorf("error executing template: %w", err)
+ }
+
+ // Write the file
+ outputPath := filepath.Join(g.outputDir, "repositories", fmt.Sprintf("%s.md", repo.Name))
+ err = os.WriteFile(outputPath, buf.Bytes(), 0644)
+ if err != nil {
+ return "", fmt.Errorf("error writing markdown file: %w", err)
+ }
+
+ return outputPath, nil
+}
+
+// GenerateAllRepositoriesMarkdown generates markdown files for all repositories
+func (g *MarkdownGenerator) GenerateAllRepositoriesMarkdown(dashboard types.Dashboard) ([]string, error) {
+ var filePaths []string
+
+ for _, repo := range dashboard.Repositories {
+ path, err := g.GenerateRepositoryMarkdown(repo)
+ if err != nil {
+ return filePaths, err
+ }
+ filePaths = append(filePaths, path)
+ }
+
+ return filePaths, nil
+}
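
A closing sketch that exercises the markdown generator on its own. pkg/types is not part of this diff, so the Repository and Issue field names below are inferred from the template and converters above; the repository contents are placeholders.

```go
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"time"

	"github.com/go-i2p/go-github-dashboard/pkg/generator"
	"github.com/go-i2p/go-github-dashboard/pkg/types"
)

func main() {
	cfg := &types.Config{OutputDir: "./dashboard", Verbose: true}

	// GenerateRepositoryMarkdown writes into OutputDir/repositories/, which
	// config.GetConfig normally creates; create it here so the sketch is
	// self-contained.
	if err := os.MkdirAll(filepath.Join(cfg.OutputDir, "repositories"), 0755); err != nil {
		log.Fatal(err)
	}

	gen, err := generator.NewMarkdownGenerator(cfg)
	if err != nil {
		log.Fatal(err)
	}

	repo := types.Repository{
		Name:     "demo",
		FullName: "octocat/demo",
		Issues: []types.Issue{{
			Title:     "Example issue",
			URL:       "https://github.com/octocat/demo/issues/1",
			Author:    "octocat",
			AuthorURL: "https://github.com/octocat",
			UpdatedAt: time.Now(),
		}},
	}

	path, err := gen.GenerateRepositoryMarkdown(repo)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("wrote", path)
}
```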