pq: remaining connection slots are reserved for non-replication superuser and rds_superuser connections

I keep getting this error:

pq: remaining connection slots are reserved for non-replication superuser and rds_superuser connections

I've read many articles, but they all give solutions from the database's side, not from the Go side. I can't change the database config; max_connections is already set to 3500.
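If it helps, I can watch the number of open connections while the import runs with something like this sketch (pg_stat_activity is the standard Postgres statistics view; monitorDB stands for a separate connection opened only for this check):

// assumes: database/sql, fmt, log, and _ "github.com/lib/pq" are imported
monitorDB, err := sql.Open("postgres", dm_connString) // dm_connString: placeholder for my real connection string
if err != nil {
    log.Fatal(err)
}
defer monitorDB.Close()

var active int
// count every session currently open against this database
err = monitorDB.QueryRow(`SELECT count(*) FROM pg_stat_activity WHERE datname = current_database()`).Scan(&active)
if err != nil {
    log.Fatal(err)
}
fmt.Println("active connections:", active)

The count keeps climbing while the job runs, which matches the error above.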

The process is: I fetch the data by calling an API, then store the response data directly into someone else's database, not through an API.

I've tried adding time.Sleep, reusing the HTTP client, pooling HTTP connections, and setting up the Transport, but it seems the problem has nothing to do with the HTTP connection.

*I already checked: I close the response.Body.

GET:

client := &http.Client{
    Transport: &http.Transport{
        MaxIdleConns:        100,
        MaxIdleConnsPerHost: 100,
    },
    Timeout: 30 * time.Second,
}

req, err := http.NewRequest("GET", magento_url, nil)
if err != nil {
    log.Fatalf("Couldn't create the request. %+v", err)
}

response, err := client.Do(req)
if err != nil {
    // bail out before touching response, which is nil on error
    log.Fatalf("The HTTP request failed with error %s", err)
}
defer response.Body.Close()

body, err := ioutil.ReadAll(response.Body)
if err != nil {
    log.Fatalf("Couldn't read the response body. %+v", err)
}
fmt.Println("Response Body:", string(body))

countriesMap := make(map[string]interface{})
if err := json.Unmarshal(body, &countriesMap); err != nil {
    panic(err.Error())
}

for key, val := range countriesMap {
    if key == "items" {
        switch concreteVal := val.(type) {
        case []interface{}:
            fmt.Println(key)
            parseArray(concreteVal)
        default:
            fmt.Println(key, ":", concreteVal)
        }
    }
}

POST:

func parseArray(anArray []interface{}) {
    arrayData := make(map[string]interface{})
    for i, val := range anArray {
        switch concreteVal := val.(type) {
        case map[string]interface{}:
            fmt.Println("Index::", i)
            for key, valLast := range concreteVal {
                if key == "coupon_code" {
                    arrayData["coupon_code"] = valLast
                } else if key == "coupon_description" {
                    arrayData["coupon_description"] = valLast
                }
            }
        default:
            fmt.Println("Index", i, ":", concreteVal)
        }

        // open a connection for this element; the deferred Close
        // only runs when parseArray returns, not on every iteration
        dm_connection := []string{"user=", dm_username, " password=", dm_password, " dbname=", dm_database, " port=", dm_port, " host=", dm_host}
        db, err := sql.Open("postgres", strings.Join(dm_connection, ""))
        if err != nil {
            panic(err.Error())
        }
        defer db.Close()

        var dataField ApiData

        // skip coupons that are already stored
        checkingData := `SELECT coupon_code FROM discount WHERE coupon_code = $1`
        err = db.QueryRow(checkingData, arrayData["coupon_code"]).Scan(&dataField.Coupon_code)
        if err != nil {
            if err == sql.ErrNoRows {
                // INSERT DM
                InsertDM(db, arrayData)
            } else {
                fmt.Println(err.Error())
            }
        }
    }
}

func InsertDM(db *sql.DB, arrayData map[string]interface{}) {
    insertStatement := `INSERT INTO discount (coupon_code, coupon_description, created_at) VALUES ($1, $2, $3)`
    _, errInsertDM := db.Exec(insertStatement, arrayData["coupon_code"], arrayData["coupon_description"], arrayData["timestamp"])

    // if there is an error inserting, handle it
    if errInsertDM != nil {
        panic(errInsertDM.Error())
    }

    insertedData++ // package-level counter of inserted rows
}

I expected it to handle around 15,000 records, but the error always appears at around 3,000.
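From the articles I've read, the usual Go pattern is to open a single *sql.DB once at startup and share it, something like this sketch (SetMaxOpenConns, SetMaxIdleConns, and SetConnMaxLifetime are standard database/sql methods; connString is a placeholder for my real connection string):

// assumes: database/sql, log, time, and _ "github.com/lib/pq" are imported
db, err := sql.Open("postgres", connString) // open once, at startup
if err != nil {
    log.Fatal(err)
}
defer db.Close()

db.SetMaxOpenConns(20)                 // cap concurrent connections to Postgres
db.SetMaxIdleConns(20)                 // keep idle connections around for reuse
db.SetConnMaxLifetime(5 * time.Minute) // recycle connections periodically

// then pass db into parseArray / InsertDM

Is that the piece I'm missing, or does the error come from somewhere else?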