add inspect_token endpoint, minor cleanup

commit 66e0639f48
parent 336941f584
default.jwk.json (new file)
@@ -0,0 +1,6 @@
+{
+    "crv": "P-256",
+    "d": "GYAwlBHc2mPsj1lp315HbYOmKNJ7esmO3JAkZVn9nJs",
+    "x": "ToL2HppsTESXQKvp7ED6NMgV4YnwbMeONexNry3KDNQ",
+    "y": "Tt6Q3rxU37KAinUV9PLMlwosNy1t3Bf2VDg5q955AGc"
+}
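
For orientation: these four fields are the standard JWK form of a P-256 (ES256) private key, where x and y are the public curve coordinates and d is the private scalar, each base64url-encoded without padding. The new main() below loads this file from ./default.jwk.json and hands it to mockid.ParseKey; the following standalone sketch (not code from this repo) shows roughly how the fields map onto crypto/ecdsa:

package main

import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "encoding/base64"
    "fmt"
    "math/big"
)

// b64ToInt decodes an unpadded base64url JWK field into a big integer.
func b64ToInt(s string) *big.Int {
    b, err := base64.RawURLEncoding.DecodeString(s)
    if err != nil {
        panic(err)
    }
    return new(big.Int).SetBytes(b)
}

func main() {
    // Values copied from the JWK file above.
    priv := &ecdsa.PrivateKey{
        PublicKey: ecdsa.PublicKey{
            Curve: elliptic.P256(),
            X:     b64ToInt("ToL2HppsTESXQKvp7ED6NMgV4YnwbMeONexNry3KDNQ"),
            Y:     b64ToInt("Tt6Q3rxU37KAinUV9PLMlwosNy1t3Bf2VDg5q955AGc"),
        },
        D: b64ToInt("GYAwlBHc2mPsj1lp315HbYOmKNJ7esmO3JAkZVn9nJs"),
    }
    // Sanity check: the public point must lie on the P-256 curve.
    fmt.Println(priv.Curve.IsOnCurve(priv.X, priv.Y))
}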
go.mod (7 lines changed)
@@ -1,5 +1,8 @@
 module git.coolaj86.com/coolaj86/go-mockid
 
-go 1.12
+go 1.13
 
-require github.com/joho/godotenv v1.3.0
+require (
+    git.rootprojects.org/root/keypairs v0.5.2
+    github.com/joho/godotenv v1.3.0
+)
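
One likely reason for the go 1.12 → 1.13 bump is the %w error-wrapping verb used in the reworked main() in mockid.go below; fmt.Errorf's %w and the errors.Is/errors.As helpers were introduced in Go 1.13. A minimal standalone illustration of the pattern (not code from this repo):

package main

import (
    "errors"
    "fmt"
    "io/ioutil"
    "os"
)

func main() {
    // Wrap a file-read error with %w so callers can still match the underlying cause.
    _, err := ioutil.ReadFile("./default.jwk.json")
    if err != nil {
        wrapped := fmt.Errorf("read default jwk: %w", err)
        fmt.Println(errors.Is(wrapped, os.ErrNotExist)) // true when the file is absent
    }
}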
go.sum (2 lines changed)
@@ -1,2 +1,4 @@
+git.rootprojects.org/root/keypairs v0.5.2 h1:jr+drUUm/REaCDJTl5gT3kF2PwlXygcLsBZlqoKTZZw=
+git.rootprojects.org/root/keypairs v0.5.2/go.mod h1:WGI8PadOp+4LjUuI+wNlSwcJwFtY8L9XuNjuO3213HA=
 github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
 github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
mockid.go (46 lines changed)
@@ -4,6 +4,7 @@ import (
     "encoding/json"
     "flag"
     "fmt"
+    "io/ioutil"
     "log"
     "net/http"
     "net/url"
@@ -20,22 +21,6 @@ func main() {
     var port int
     var host string
 
-    jwkm := map[string]string{
-        "crv": "P-256",
-        "d": "GYAwlBHc2mPsj1lp315HbYOmKNJ7esmO3JAkZVn9nJs",
-        "x": "ToL2HppsTESXQKvp7ED6NMgV4YnwbMeONexNry3KDNQ",
-        "y": "Tt6Q3rxU37KAinUV9PLMlwosNy1t3Bf2VDg5q955AGc",
-    }
-    jwk := &mockid.PrivateJWK{
-        PublicJWK: mockid.PublicJWK{
-            Crv: jwkm["crv"],
-            X: jwkm["x"],
-            Y: jwkm["y"],
-        },
-        D: jwkm["d"],
-    }
-    priv := mockid.ParseKey(jwk)
-
     portFlag := flag.Int("port", 0, "Port on which the HTTP server should run")
     urlFlag := flag.String("url", "", "Outward-facing address, such as https://example.com")
     prefixFlag := flag.String("jwkspath", "", "The path to the JWKs storage directory")
@@ -52,6 +37,31 @@ func main() {
         os.Exit(1)
     }
 
+    jwkpath := "./default.jwk.json"
+    jwkb, err := ioutil.ReadFile(jwkpath)
+    if nil != err {
+        panic(fmt.Errorf("read default jwk %v: %w", jwkpath, err))
+        return
+    }
+
+    jwkm := map[string]string{}
+    err = json.Unmarshal(jwkb, &jwkm)
+    if nil != err {
+        // TODO delete the bad file?
+        panic(fmt.Errorf("unmarshal jwk %v: %w", string(jwkb), err))
+        return
+    }
+
+    jwk := &mockid.PrivateJWK{
+        PublicJWK: mockid.PublicJWK{
+            Crv: jwkm["crv"],
+            X: jwkm["x"],
+            Y: jwkm["y"],
+        },
+        D: jwkm["d"],
+    }
+    priv := mockid.ParseKey(jwk)
+
     if nil != urlFlag && "" != *urlFlag {
         host = *urlFlag
     } else {
@@ -64,7 +74,7 @@ func main() {
     } else {
         jwksPrefix = "public-jwks"
     }
-    err := os.MkdirAll(jwksPrefix, 0755)
+    err = os.MkdirAll(jwksPrefix, 0755)
     if nil != err {
         fmt.Fprintf(os.Stderr, "couldn't write %q: %s", jwksPrefix, err)
         os.Exit(1)
@@ -72,7 +82,7 @@ func main() {
 
     mockid.Route(jwksPrefix, priv, jwk)
 
-    fs := http.FileServer(http.Dir("public"))
+    fs := http.FileServer(http.Dir("./public"))
     http.Handle("/", fs)
     /*
         http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
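
The reworked main() above now requires ./default.jwk.json to exist and panics when it cannot be read or parsed. A standalone sketch (not part of this repo, and not necessarily how the project generates keys) for producing a file in the expected shape:

package main

import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rand"
    "encoding/base64"
    "encoding/json"
    "io/ioutil"
    "math/big"
)

// pad32 left-pads a big-endian integer to the 32 bytes a P-256 JWK field expects.
func pad32(n *big.Int) []byte {
    b := n.Bytes()
    out := make([]byte, 32)
    copy(out[32-len(b):], b)
    return out
}

func main() {
    priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    if err != nil {
        panic(err)
    }
    enc := base64.RawURLEncoding.EncodeToString
    jwk := map[string]string{
        "crv": "P-256",
        "d":   enc(pad32(priv.D)),
        "x":   enc(pad32(priv.PublicKey.X)),
        "y":   enc(pad32(priv.PublicKey.Y)),
    }
    b, err := json.MarshalIndent(jwk, "", "    ")
    if err != nil {
        panic(err)
    }
    if err := ioutil.WriteFile("./default.jwk.json", b, 0600); err != nil {
        panic(err)
    }
}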
mockid/mockid.go (111 lines changed)
@@ -18,6 +18,9 @@ import (
     "strconv"
     "strings"
     "time"
+
+    "git.rootprojects.org/root/keypairs"
+    "git.rootprojects.org/root/keypairs/keyfetch"
 )
 
 type PrivateJWK struct {
@@ -111,6 +114,112 @@ func Route(jwksPrefix string, priv *ecdsa.PrivateKey, jwk *PrivateJWK) {
         fmt.Fprintf(w, token)
     })
 
+    http.HandleFunc("/inspect_token", func(w http.ResponseWriter, r *http.Request) {
+        token := r.Header.Get("Authorization")
+        log.Printf("%s %s %s\n", r.Method, r.URL.Path, token)
+
+        if "" == token {
+            token = r.URL.Query().Get("access_token")
+            if "" == token {
+                http.Error(w, "Bad Format: missing Authorization header and 'access_token' query", http.StatusBadRequest)
+                return
+            }
+        } else {
+            parts := strings.Split(token, " ")
+            if 2 != len(parts) {
+                http.Error(w, "Bad Format: expected Authorization header to be in the format of 'Bearer <Token>'", http.StatusBadRequest)
+                return
+            }
+            token = parts[1]
+        }
+
+        parts := strings.Split(token, ".")
+        if 3 != len(parts) {
+            http.Error(w, "Bad Format: token should be in the format of <protected-header>.<body>.<signature>", http.StatusBadRequest)
+            return
+        }
+        protected64 := parts[0]
+        data64 := parts[1]
+        signature64 := parts[2]
+
+        protectedB, err := base64.RawURLEncoding.DecodeString(protected64)
+        if nil != err {
+            http.Error(w, "Bad Format: token's header should be URL-safe base64 encoded", http.StatusBadRequest)
+            return
+        }
+        dataB, err := base64.RawURLEncoding.DecodeString(data64)
+        if nil != err {
+            http.Error(w, "Bad Format: token's body should be URL-safe base64 encoded", http.StatusBadRequest)
+            return
+        }
+        // TODO verify signature
+        _, err = base64.RawURLEncoding.DecodeString(signature64)
+        if nil != err {
+            http.Error(w, "Bad Format: token's signature should be URL-safe base64 encoded", http.StatusBadRequest)
+            return
+        }
+
+        errors := []string{}
+
+        protected := map[string]interface{}{}
+        err = json.Unmarshal(protectedB, &protected)
+        if nil != err {
+            http.Error(w, "Bad Format: token's header should be URL-safe base64-encoded JSON", http.StatusBadRequest)
+            return
+        }
+        kid, kidOK := protected["kid"].(string)
+        // TODO parse jwkM
+        _, jwkOK := protected["jwk"]
+        if !kidOK && !jwkOK {
+            errors = append(errors, "must have either header.kid or header.jwk")
+        }
+
+        data := map[string]interface{}{}
+        err = json.Unmarshal(dataB, &data)
+        if nil != err {
+            http.Error(w, "Bad Format: token's body should be URL-safe base64-encoded JSON", http.StatusBadRequest)
+            return
+        }
+        iss, issOK := data["iss"].(string)
+        if !jwkOK && !issOK {
+            errors = append(errors, "body.iss must exist to complement header.kid")
+        }
+
+        pub, err := keyfetch.OIDCJWK(kid, iss)
+        if nil != err {
+            fmt.Println("couldn't fetch pub key:")
+            fmt.Println(err)
+        }
+        fmt.Println("fetched pub key:")
+        fmt.Println(pub)
+
+        inspected := struct {
+            Public    keypairs.PublicKey     `json:"public"`
+            Protected map[string]interface{} `json:"protected"`
+            Body      map[string]interface{} `json:"body"`
+            Signature string                 `json:"signature"`
+            Verified  bool                   `json:"verified"`
+            Errors    []string               `json:"errors"`
+        }{
+            Public:    pub,
+            Protected: protected,
+            Body:      data,
+            Signature: signature64,
+            Verified:  false,
+            Errors:    errors,
+        }
+
+        tokenB, err := json.Marshal(inspected)
+        if nil != err {
+            fmt.Println("couldn't serialize inspected token:")
+            fmt.Println(err)
+        }
+        fmt.Println("serialized inspected token")
+        fmt.Println(inspected)
+        fmt.Println(string(tokenB))
+        fmt.Fprintf(w, string(tokenB))
+    })
+
     http.HandleFunc("/authorization_header", func(w http.ResponseWriter, r *http.Request) {
         log.Printf("%s %s\n", r.Method, r.URL.Path)
 
@@ -382,7 +491,7 @@ func GenToken(host string, priv *ecdsa.PrivateKey, query url.Values) (string, st
         sb = append([]byte{0}, sb...)
     }
     sig64 := base64.RawURLEncoding.EncodeToString(append(rb, sb...))
-    token := fmt.Sprintf(`%s.%s.%s`, protected64, payload64, sig64)
+    token := fmt.Sprintf("%s.%s.%s\n", protected64, payload64, sig64)
     return protected, payload, token
 }
 
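
A rough usage sketch for the new /inspect_token endpoint. The handler accepts either an Authorization: Bearer <token> header or an access_token query parameter and replies with JSON containing public, protected, body, signature, verified, and errors fields; malformed tokens get a 400 "Bad Format" response. The host, port, and token below are placeholders (the listening port comes from the -port flag), not values from this repo:

package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
)

func main() {
    // Placeholder token; substitute one minted by the mock server.
    token := "<protected64>.<payload64>.<signature64>"

    req, err := http.NewRequest("GET", "http://localhost:8080/inspect_token", nil)
    if err != nil {
        panic(err)
    }
    req.Header.Set("Authorization", "Bearer "+token)

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    body, _ := ioutil.ReadAll(resp.Body)
    fmt.Println(resp.Status)
    // On success the body looks like:
    // {"public":...,"protected":{...},"body":{...},"signature":"...","verified":false,"errors":[...]}
    fmt.Println(string(body))
}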