I'm writing a Go program that needs to log in to a website to get some data. The login process works, but now I have the problem that I don't get access to the protected page with the cookie I got from the login form. After examining the cookies and comparing them to the ones my browser gets, I noticed that my program gets cookies with "empty" timestamps. Can someone point out how I get the cookie with a correct timestamp? That would be fantastic.
This is my code:
package main

import (
    "fmt"
    "html"
    "io/ioutil"
    "log"
    "net/http"
    "net/http/cookiejar"
    "net/url"
    "regexp"
    "strings"
    "time"
)

var CookieJar *cookiejar.Jar
var httpClient *http.Client

func dbgPrintCurCookies(CurCookies []*http.Cookie) {
    var cookieNum int = len(CurCookies)
    log.Printf("cookieNum=%d", cookieNum)
    for i := 0; i < cookieNum; i++ {
        var curCk *http.Cookie = CurCookies[i]
        //log.Printf("curCk.Raw=%s", curCk.Raw)
        log.Printf("Cookie [%d]", i)
        log.Printf("Name\t=%s", curCk.Name)
        log.Printf("Value\t=%s", curCk.Value)
        log.Printf("Path\t=%s", curCk.Path)
        log.Printf("Domain\t=%s", curCk.Domain)
        log.Printf("Expires\t=%s", curCk.Expires)
        log.Printf("RawExpires=%s", curCk.RawExpires)
        log.Printf("MaxAge\t=%d", curCk.MaxAge)
        log.Printf("Secure\t=%t", curCk.Secure)
        log.Printf("HttpOnly=%t", curCk.HttpOnly)
        log.Printf("Raw\t=%s", curCk.Raw)
        log.Printf("Unparsed=%s", curCk.Unparsed)
    }
}
func main() {
    CookieJar, _ = cookiejar.New(nil)
    httpClient = &http.Client{
        Jar:     CookieJar,
        Timeout: 10 * time.Second,
        CheckRedirect: func(req *http.Request, via []*http.Request) error {
            return http.ErrUseLastResponse
        }}

    LSFRedirURL := ""
    pw := ""
    us := ""
    LoginURL := "login website ?j_password=" + pw + "&j_username=" + us
    GetFinalCookieURL := ""

    //get first cookie
    nextURL := LSFRedirURL
    for i := 0; i < 10; i++ {
        resp, _ := httpClient.Get(nextURL)
        // fmt.Println("StatusCode:", resp.StatusCode)
        // fmt.Println(resp.Request.URL)
        if resp.StatusCode == 200 {
            // fmt.Println("Done!")
            break
        } else {
            nextURL = resp.Header.Get("Location")
        }
    }

    //save first cookie
    url1, _ := url.Parse("first cookie website")
    firstCookie := CookieJar.Cookies(url1)[0]
    fmt.Println("First Cookie :\\)")
    //getting second cookie and params
    // var cam []string
    var resp *http.Response
    nextURL = LoginURL
    for i := 0; i < 10; i++ {
        resp, _ = httpClient.Post(nextURL, "", nil)
        // fmt.Println("StatusCode:", resp.StatusCode)
        // fmt.Println(resp.Request.URL)
        // cam = append(cam, nextURL)
        if resp.StatusCode == 200 {
            fmt.Println("Done!")
            break
        } else {
            nextURL = resp.Header.Get("Location")
        }
    }

    //second cookie
    url2, _ := url.Parse("website second cookie is from")
    secondCookie := CookieJar.Cookies(url2)[0]
    fmt.Println("Second Cookie :\\)")

    //params
    defer resp.Body.Close()
    c, _ := ioutil.ReadAll(resp.Body)
    data := html.UnescapeString(string(c))
    //fmt.Println(data)
    getvalue := regexp.MustCompile("value=\".*\"")
    values := getvalue.FindAllStringSubmatch(data, -1)
    values[0][0] = strings.TrimSuffix(values[0][0], "\"")
    values[0][0] = strings.TrimPrefix(values[0][0], "value=\"")
    values[1][0] = strings.TrimSuffix(values[1][0], "\"")
    values[1][0] = strings.TrimPrefix(values[1][0], "value=\"")
    v := url.Values{
        "SAMLResponse": {values[1][0]},
        "RelayState":   {values[0][0]},
    }
    body := strings.NewReader(v.Encode())
    fmt.Println("Values :\\)")
    //adding values and cookies to request
    req, _ := http.NewRequest("POST", GetFinalCookieURL, body)
    req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
    req.AddCookie(firstCookie)
    req.AddCookie(secondCookie)
    resp, _ = httpClient.Do(req)

    //we got the real cookie
    url3, _ := url.Parse("website i get the cookies for")
    dbgPrintCurCookies(CookieJar.Cookies(url3))
    finalCookie := CookieJar.Cookies(url3)[0]
    finalCookie2 := CookieJar.Cookies(url3)[1]
    fmt.Println("StatusCode:", resp.StatusCode)
    fmt.Println(resp.Request.URL)
    nextURL = resp.Header.Get("Location")
    fmt.Println(nextURL)

    nextURL = "website i need the cookies for"
    req, _ = http.NewRequest("GET", nextURL, nil)
    req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
    req.AddCookie(finalCookie)
    req.AddCookie(finalCookie2)
    resp, _ = httpClient.Do(req)

    url3, _ = url.Parse("final cookie website")
    dbgPrintCurCookies(CookieJar.Cookies(url3))
    fmt.Println(resp.StatusCode)
    fmt.Println(resp.Request.URL)

    defer resp.Body.Close()
    data3, _ := ioutil.ReadAll(resp.Body)
    fmt.Println(string(data3))
}
And this is an example of what the cookie I get looks like:
2018/02/10 01:55:48 cookieNum=2
2018/02/10 01:55:48 Cookie [0]
2018/02/10 01:55:48 Name =JSESSIONID
2018/02/10 01:55:48 Value =86E2C361905167A1F64FC45C400649F2.stupo1
2018/02/10 01:55:48 Path =
2018/02/10 01:55:48 Domain =
2018/02/10 01:55:48 Expires =0001-01-01 00:00:00 +0000 UTC
2018/02/10 01:55:48 RawExpires=
2018/02/10 01:55:48 MaxAge =0
2018/02/10 01:55:48 Secure =false
2018/02/10 01:55:48 HttpOnly=false
2018/02/10 01:55:48 Raw =
2018/02/10 01:55:48 Unparsed=[]
Edit: Added the complete code. And this is the cookie in the browser: [screenshot of the cookie as shown in the browser]
Cookies come in two flavors: the ones you receive in a Set-Cookie header and the ones you send in a Cookie header. Only Set-Cookie header cookies carry expiration times and the various other fields, while Cookie header cookies are plain name/value tuples, and that is what a cookie jar returns, because that is what gets included in the Cookie header.
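You can see the difference directly in a client: the parsed Set-Cookie data (with Expires, MaxAge, Path, and so on) is available per response via resp.Cookies(), while the jar only hands back name/value pairs. A minimal sketch, assuming placeholder URLs:

package main

import (
    "log"
    "net/http"
    "net/http/cookiejar"
    "net/url"
)

func main() {
    jar, _ := cookiejar.New(nil)
    client := &http.Client{Jar: jar}

    // "https://example.com/login" is a placeholder for the login URL.
    resp, err := client.Get("https://example.com/login")
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()

    // resp.Cookies() parses the Set-Cookie headers of this response;
    // these carry Expires, MaxAge, Path, Domain, Secure and HttpOnly.
    for _, c := range resp.Cookies() {
        log.Printf("Set-Cookie: %s=%s Expires=%s MaxAge=%d", c.Name, c.Value, c.Expires, c.MaxAge)
    }

    // The jar only returns what would be sent in a Cookie request header:
    // plain name/value pairs, every other field stays at its zero value.
    u, _ := url.Parse("https://example.com/")
    for _, c := range jar.Cookies(u) {
        log.Printf("Cookie: %s=%s Expires=%s", c.Name, c.Value, c.Expires)
    }
}

Note that resp.Cookies() only covers the Set-Cookie headers of that particular response; since the client in the question disables redirect following with http.ErrUseLastResponse, you would see them hop by hop.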
The default cookie jar implementation in the stdlib does not provide a mechanism to extract all cookies, or any fields other than name and value. If you need that information, use another open source cookie jar implementation.
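If you only need to inspect those fields, a stdlib-only alternative is a small wrapper that satisfies the http.CookieJar interface and records every cookie it is handed before delegating to a normal jar. This is just a sketch; the recordingJar type and newClient helper are made-up names, not part of any library:

package main

import (
    "log"
    "net/http"
    "net/http/cookiejar"
    "net/url"
)

// recordingJar is a hypothetical wrapper: it keeps the full *http.Cookie
// values received via Set-Cookie (including Expires, MaxAge, Domain, ...)
// while delegating the actual cookie handling to an ordinary jar.
type recordingJar struct {
    inner    http.CookieJar
    Received []*http.Cookie
}

func (j *recordingJar) SetCookies(u *url.URL, cookies []*http.Cookie) {
    j.Received = append(j.Received, cookies...) // full Set-Cookie data
    j.inner.SetCookies(u, cookies)
}

func (j *recordingJar) Cookies(u *url.URL) []*http.Cookie {
    return j.inner.Cookies(u) // name/value pairs, as before
}

func newClient() (*http.Client, *recordingJar) {
    inner, _ := cookiejar.New(nil)
    rec := &recordingJar{inner: inner}
    return &http.Client{Jar: rec}, rec
}

func main() {
    client, rec := newClient()

    // Placeholder URL; every Set-Cookie seen along the way ends up in rec.Received.
    if resp, err := client.Get("https://example.com/login"); err == nil {
        resp.Body.Close()
    }
    for _, c := range rec.Received {
        log.Printf("%s=%s Expires=%s", c.Name, c.Value, c.Expires)
    }
}

After the login requests, rec.Received holds the cookies exactly as they arrived in the Set-Cookie headers, expiration timestamps included.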
(Note that the whole purpose of the stdlib cookie jar implementation is to transparently handle cookies, even through redirections. It is not a solution for collecting information about incoming Set-Cookie headers and the various values sent in them.)
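On that note: because the client in the question already has a Jar, the manual req.AddCookie(...) calls should not be necessary; the client consults the jar for every request it sends. A minimal sketch with placeholder URLs:

package main

import (
    "net/http"
    "net/http/cookiejar"
)

func main() {
    jar, _ := cookiejar.New(nil)
    client := &http.Client{Jar: jar}

    // Any Set-Cookie received here is stored in the jar ...
    if resp, err := client.Get("https://example.com/login"); err == nil {
        resp.Body.Close()
    }

    // ... and sent automatically here; no req.AddCookie is needed.
    if resp, err := client.Get("https://example.com/protected"); err == nil {
        resp.Body.Close()
    }
}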