From d87b197cc067590148467fd3085be4f4b9e3484d Mon Sep 17 00:00:00 2001 From: AJ ONeal Date: Sun, 23 Jun 2019 03:01:24 -0600 Subject: [PATCH] WIP delete works too now --- again.go | 14 ++-- cmd/again/again.go | 36 +++++++++-- data/jsondb/jsondb.go | 125 ++++++++++++++++++++++++++---------- public/app.js | 33 ++++------ public/index.html | 145 ++++++++++++++++++++++++------------------ 5 files changed, 225 insertions(+), 128 deletions(-) diff --git a/again.go b/again.go index 17acf5b..b5bf757 100644 --- a/again.go +++ b/again.go @@ -208,21 +208,21 @@ func IsAmbiguous(st []int, tzstr string) error { if nil != err { return err } + m := time.Month(st[1]) t1 := time.Date(st[0], m, st[2], st[3], st[4], st[5], st[6], tz) u1 := t1.UTC() - // A better way to do this would probably be to parse the timezone database, but... yeah... - for _, n := range []int{ /*-120, -60,*/ 30, 60, 120} { + // Australia/Lord_Howe has a 30-minute DST + // 60-minute DST is common + // Antarctica/Troll has a 120-minute DST + for _, n := range []int{30, 60, 120} { t2 := time.Date(st[0], m, st[2], st[3], st[4]+n, st[5], st[6], tz) u2 := t2.UTC() if u1.Equal(u2) { - fmt.Println("Ambiguous Time") - fmt.Printf("%s, %s, %+d\n", t1, u1, n) - fmt.Printf("%s, %s, %+d\n", t2, u2, n) - return fmt.Errorf("Ambiguous") + return fmt.Errorf("Ambiguous: %s, %s, %+d\n", t1, t2, n) } } - //ta := + return nil } diff --git a/cmd/again/again.go b/cmd/again/again.go index 81291bd..cc7329d 100644 --- a/cmd/again/again.go +++ b/cmd/again/again.go @@ -77,6 +77,7 @@ func main() { } //mux.Handle("/api/", http.HandlerFunc(handleFunc)) mux.HandleFunc("/api/v0/schedules", s.Handle) + mux.HandleFunc("/api/v0/schedules/", s.Handle) // TODO Filebox FS mux.Handle("/", http.FileServer(http.Dir("./public"))) @@ -88,6 +89,7 @@ func main() { type ScheduleDB interface { List(string) ([]*again.Schedule, error) Set(again.Schedule) (*again.Schedule, error) + Delete(accessID string, id string) (*again.Schedule, error) } type scheduler struct { @@ -107,7 +109,6 @@ func (s *scheduler) Handle(w http.ResponseWriter, r *http.Request) { ctx = context.WithValue(ctx, "token", token) r = r.WithContext(ctx) - fmt.Println("whatever", r.Method, r.URL) switch r.Method { case http.MethodGet: s.List(w, r) @@ -115,6 +116,9 @@ func (s *scheduler) Handle(w http.ResponseWriter, r *http.Request) { case http.MethodPost: s.Create(w, r) return + case http.MethodDelete: + s.Delete(w, r) + return default: http.Error(w, "Not Implemented", http.StatusNotImplemented) return @@ -144,10 +148,8 @@ func (s *scheduler) Create(w http.ResponseWriter, r *http.Request) { br, bw := io.Pipe() b := io.TeeReader(r.Body, bw) go func() { - fmt.Println("reading from reader...") x, _ := ioutil.ReadAll(b) - fmt.Println("cool beans and all") - fmt.Println(string(x)) + fmt.Println("[debug] http body", string(x)) bw.Close() }() decoder := json.NewDecoder(br) @@ -176,3 +178,29 @@ func (s *scheduler) Create(w http.ResponseWriter, r *http.Request) { } w.Write(buf) } + +func (s *scheduler) Delete(w http.ResponseWriter, r *http.Request) { + // TODO validate user + accessID := r.Context().Value("token").(string) + parts := strings.Split(r.URL.Path, "/") + + // ""/"api"/"v0"/"schedules"/":id" + if 5 != len(parts) { + http.Error(w, "Not Found", http.StatusNotFound) + return + } + + id := parts[4] + sched2, err := s.DB.Delete(accessID, id) + if nil != err { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + buf, err := json.Marshal(sched2) + if nil != err { + http.Error(w, err.Error(), 
http.StatusInternalServerError) + return + } + w.Write(buf) +} diff --git a/data/jsondb/jsondb.go b/data/jsondb/jsondb.go index 4aad138..b84877a 100644 --- a/data/jsondb/jsondb.go +++ b/data/jsondb/jsondb.go @@ -1,3 +1,9 @@ +// A JSON storage strategy for Go Again +// SQL would be a better choice, but... meh +// +// Note that we use mutexes instead of channels +// because everything is both synchronous and +// sequential. Meh. package jsondb import ( @@ -8,16 +14,20 @@ import ( "fmt" "net/url" "os" + "path/filepath" "strings" + "sync" "time" - again "git.rootprojects.org/root/go-again" + "git.rootprojects.org/root/go-again" ) type JSONDB struct { dburl string path string json *dbjson + mux sync.Mutex + fmux sync.Mutex } type dbjson struct { @@ -31,13 +41,9 @@ func Connect(dburl string) (*JSONDB, error) { } // json:/abspath/to/db.json - fmt.Println("url.Opaque:", u.Opaque) - // json:///abspath/to/db.json - fmt.Println("url.Path:", u.Path) - fmt.Println(u) - path := u.Opaque if "" == path { + // json:///abspath/to/db.json path = u.Path if "" == path { // json:relpath/to/db.json @@ -73,10 +79,14 @@ func Connect(dburl string) (*JSONDB, error) { return nil, fmt.Errorf("Couldn't parse %q as JSON: %s", path, err) } + wd, _ := os.Getwd() + fmt.Println("jsondb:", filepath.Join(wd, path)) return &JSONDB{ dburl: dburl, path: path, json: db, + mux: sync.Mutex{}, + fmux: sync.Mutex{}, }, nil } @@ -92,10 +102,6 @@ type Schedule struct { Webhooks []again.Webhook `json:"webhooks" db"webhooks"` } -func ctcmp(x string, y string) bool { - return 1 == subtle.ConstantTimeCompare([]byte(x), []byte(y)) -} - func (db *JSONDB) List(accessID string) ([]*again.Schedule, error) { schedules := []*again.Schedule{} for i := range db.json.Schedules { @@ -115,30 +121,11 @@ func (db *JSONDB) List(accessID string) ([]*again.Schedule, error) { return schedules, nil } -func (db *JSONDB) get(id string) (int, *Schedule) { - for i := range db.json.Schedules { - schedule := db.json.Schedules[i] - if ctcmp(id, schedule.ID) { - return i, &schedule - } - } - return -1, nil -} - -func genID() (string, error) { - b := make([]byte, 16) - _, err := rand.Read(b) - if nil != err { - return "", err - } - return hex.EncodeToString(b), nil -} - func (db *JSONDB) Set(s again.Schedule) (*again.Schedule, error) { exists := false index := -1 if "" == s.ID { - id, err := genID() + id, err := genID(16) if nil != err { return nil, err } @@ -147,6 +134,7 @@ func (db *JSONDB) Set(s again.Schedule) (*again.Schedule, error) { i, old := db.get(s.ID) index = i exists = nil != old + // TODO constant time bail if !exists || !ctcmp(old.AccessID, s.AccessID) { return nil, fmt.Errorf("invalid id") } @@ -163,9 +151,13 @@ func (db *JSONDB) Set(s again.Schedule) (*again.Schedule, error) { } if exists { + db.mux.Lock() db.json.Schedules[index] = schedule + db.mux.Unlock() } else { + db.mux.Lock() db.json.Schedules = append(db.json.Schedules, schedule) + db.mux.Unlock() } err := db.save(s.AccessID) @@ -176,9 +168,73 @@ func (db *JSONDB) Set(s again.Schedule) (*again.Schedule, error) { return &s, nil } +func (db *JSONDB) Delete(accessID string, id string) (*again.Schedule, error) { + _, old := db.get(id) + exists := nil != old + // TODO constant time bail + if !exists || !ctcmp(old.AccessID, accessID) { + return nil, fmt.Errorf("invalid id") + } + + // Copy everything we keep into its own array + newSchedules := []Schedule{} + for i := range db.json.Schedules { + schedule := db.json.Schedules[i] + if old.ID != schedule.ID { + newSchedules = append(newSchedules, 
schedule) + } + } + db.mux.Lock() + db.json.Schedules = newSchedules + db.mux.Unlock() + + err := db.save(accessID) + if nil != err { + return nil, err + } + + return &again.Schedule{ + ID: old.ID, + AccessID: old.AccessID, + Date: old.Date, + Time: old.Time, + TZ: old.TZ, + NextRunAt: old.NextRunAt, + Webhooks: old.Webhooks, + }, nil +} + +func ctcmp(x string, y string) bool { + return 1 == subtle.ConstantTimeCompare([]byte(x), []byte(y)) +} + +func (db *JSONDB) get(id string) (int, *Schedule) { + db.mux.Lock() + scheds := db.json.Schedules + db.mux.Unlock() + for i := range scheds { + schedule := scheds[i] + if ctcmp(id, schedule.ID) { + return i, &schedule + } + } + return -1, nil +} + +func genID(n int) (string, error) { + b := make([]byte, n) + _, err := rand.Read(b) + if nil != err { + return "", err + } + return hex.EncodeToString(b), nil +} + func (db *JSONDB) save(accessID string) error { - // TODO per-user files (w/ mutex lock or channel on open and write) - tmppath := db.path + ".tmp" + // TODO per-user files, maybe + // or probably better to spend that time building the postgres adapter + rnd, err := genID(4) + tmppath := db.path + "." + rnd + ".tmp" bakpath := db.path + ".bak" os.Remove(tmppath) // ignore error @@ -194,12 +250,15 @@ func (db *JSONDB) save(accessID string) error { return err } + // TODO could make async and debounce... + // or spend that time on something useful + db.fmux.Lock() + defer db.fmux.Unlock() os.Remove(bakpath) // ignore error err = os.Rename(db.path, bakpath) if nil != err { return err } - err = os.Rename(tmppath, db.path) if nil != err { return err diff --git a/public/app.js b/public/app.js index bc8c9cf..95be2d2 100644 --- a/public/app.js +++ b/public/app.js @@ -50,8 +50,8 @@ newWebhookHeader(ev.target); } else if (ev.target.matches('.js-rm-header')) { rmWebhookHeader(ev.target); - } else if (ev.target.matches('.js-delete') && ev.target.closest('.js-webhook')) { - deleteWebhook(ev.target.closest('.js-webhook')); + } else if (ev.target.matches('.js-delete') && ev.target.closest('.js-schedule')) { + deleteSchedule(ev.target.closest('.js-schedule')); } else { return; } @@ -142,11 +142,12 @@ throw new Error('something bad happened'); } - state.account.schedules.push(resp.data); + state.account.schedules.push(data); displayAccount(state.account); }) .catch(function(e) { + console.error(e); window.alert(e.message); }); }); @@ -168,9 +169,8 @@ var $h = $rmHeader.closest('.js-header'); $h.parentElement.removeChild($h); } - function deleteWebhook($hook) { - var deviceId = $hook.closest('.js-schedule').querySelector('.js-id').value; - var id = $('.js-id', $hook).innerText; + function deleteSchedule($sched) { + var schedId = $('.js-id', $sched).value; var opts = { method: 'DELETE', headers: { @@ -179,27 +179,17 @@ }, cors: true }; - window.fetch('/api/iot/devices/' + deviceId + '/webhooks/' + id, opts).then(function(resp) { + window.fetch('/api/v0/schedules/' + schedId, opts).then(function(resp) { return resp.json().then(function(result) { - if (!result.webhook) { + if (!result.webhooks) { console.error(result); window.alert('something went wrong: ' + JSON.stringify(result)); return; } - var index = -1; - var dev = state.account.devices.filter(function(d, i) { - return d.accessToken == deviceId; - })[0]; - dev.webhooks.some(function(g, i) { - if (g.id === id) { - index = i; - return true; - } + state.account.schedules = state.account.schedules.filter(function(g) { + return g.id !== result.id; }); - if (index > -1) { - dev.webhooks.splice(index, 1); - 
displayAccount(state.account); - } + displayAccount(state.account); }); }); } @@ -211,6 +201,7 @@ var $devs = $('.js-schedules'); $devs.innerHTML = ''; data.schedules.forEach(function(d) { + console.log('schedule', d); var $dev = $.create($devTpl); $('.js-id', $dev).value = d.id; $('.js-date', $dev).value = d.date; diff --git a/public/index.html b/public/index.html index 8f1c0cd..74953b6 100644 --- a/public/index.html +++ b/public/index.html @@ -15,56 +15,62 @@ -
 
-
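
Not part of the patch, just handy while poking at it: a throwaway client
sketch for exercising the new DELETE route by hand. The port, the
Authorization header, and the schedule id are all guesses (the hunks above
never show how s.Handle pulls the token out of the request), so tweak them
to match however the server is actually being run.

// deletecheck.go - quick manual check of DELETE /api/v0/schedules/:id
// Assumptions (not shown in the patch): server on localhost:3000, token
// sent as a Bearer Authorization header, and a placeholder schedule id.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	id := "0123456789abcdef0123456789abcdef" // placeholder schedule id
	req, err := http.NewRequest(http.MethodDelete,
		"http://localhost:3000/api/v0/schedules/"+id, nil)
	if nil != err {
		log.Fatal(err)
	}
	// Guess: adjust this to match how the real server extracts the token.
	req.Header.Set("Authorization", "Bearer my-access-token")

	resp, err := http.DefaultClient.Do(req)
	if nil != err {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// s.Delete echoes the deleted schedule back as JSON on success.
	sched := map[string]interface{}{}
	if err := json.NewDecoder(resp.Body).Decode(&sched); nil != err {
		log.Fatal(err)
	}
	fmt.Println(resp.Status, sched)
}

A 200 plus the same id coming back is a quick sanity check that the jsondb
side dropped the right record and re-saved the file.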