Backport #16465

One of the reasons why #16447 was needed, and why #16268 was needed in the first place, is that editing LDAP configuration does not appear to be tested. This PR therefore adds a basic test that runs the edit pipeline. In doing so it became clear that #16447 and #16268 do not actually solve #16252: what actually happens is that the bytes are double encoded. This PR changes the JSON unmarshal wrapper to handle this double encoding.

Fix #16252

Signed-off-by: Andrew Thornton <art27@cantab.net>
Co-authored-by: 6543 <6543@obermui.de>
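
The "double encode" described above can be reproduced with a small standalone sketch. This is an illustration of the assumed corruption mechanism, not code from this PR: the JSON bytes gain the erroneous 0xff 0xfe prefix, the result is read as UTF-16LE code units, and those code units are stored back as a UTF-8 string. recoverDoubleEncode mirrors the recovery loop added to the unmarshal wrapper in the diff below; simulateDoubleEncode, recoverDoubleEncode and the sample config JSON are illustrative names and values only, not part of Gitea.

package main

import (
	"encoding/binary"
	"fmt"
	"unicode/utf16"
)

// simulateDoubleEncode mimics the assumed corruption: the JSON bytes gain the
// erroneous 0xff 0xfe prefix, the result is read as UTF-16LE code units, and
// those code units are written back out as a UTF-8 string.
func simulateDoubleEncode(jsonBytes []byte) []byte {
	prefixed := append([]byte{0xff, 0xfe}, jsonBytes...)
	if len(prefixed)%2 != 0 {
		prefixed = append(prefixed, ' ') // pad so it splits into 16-bit units; JSON tolerates trailing whitespace
	}
	units := make([]uint16, 0, len(prefixed)/2)
	for i := 0; i < len(prefixed); i += 2 {
		units = append(units, binary.LittleEndian.Uint16(prefixed[i:i+2]))
	}
	// Fine for the ASCII config blobs involved here; code units in the
	// surrogate range would not round-trip through utf16.Decode.
	return []byte(string(utf16.Decode(units)))
}

// recoverDoubleEncode mirrors the recovery loop this PR adds to the JSON
// unmarshal wrapper: re-pack every rune as a little-endian uint16 and drop
// the erroneous 0xff 0xfe prefix.
func recoverDoubleEncode(bs []byte) []byte {
	rs := make([]byte, 0, len(bs)*2)
	temp := make([]byte, 2)
	for _, rn := range string(bs) {
		if rn > 0xffff {
			return bs // cannot be a re-encoded stream of 16-bit units; give up
		}
		binary.LittleEndian.PutUint16(temp, uint16(rn))
		rs = append(rs, temp...)
	}
	if len(rs) > 2 && rs[0] == 0xff && rs[1] == 0xfe {
		rs = rs[2:]
	}
	return rs
}

func main() {
	original := []byte(`{"Host":"localhost","Port":389}`)
	stored := simulateDoubleEncode(original)
	fmt.Printf("stored:    %q\n", stored)
	fmt.Printf("recovered: %s\n", recoverDoubleEncode(stored))
}

Run as an ordinary Go program, this prints the mangled stored form followed by the recovered JSON.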
parent 057205a4b7
commit 0b06b2019f

3 changed files with 87 additions and 13 deletions

@@ -144,6 +144,60 @@ func TestLDAPUserSignin(t *testing.T) {
 	assert.Equal(t, u.Email, htmlDoc.Find(`label[for="email"]`).Siblings().First().Text())
 }
 
+func TestLDAPAuthChange(t *testing.T) {
+	defer prepareTestEnv(t)()
+	addAuthSourceLDAP(t, "")
+
+	session := loginUser(t, "user1")
+	req := NewRequest(t, "GET", "/admin/auths")
+	resp := session.MakeRequest(t, req, http.StatusOK)
+	doc := NewHTMLParser(t, resp.Body)
+	href, exists := doc.Find("table.table td a").Attr("href")
+	if !exists {
+		assert.True(t, exists, "No authentication source found")
+		return
+	}
+
+	req = NewRequest(t, "GET", href)
+	resp = session.MakeRequest(t, req, http.StatusOK)
+	doc = NewHTMLParser(t, resp.Body)
+	csrf := doc.GetCSRF()
+	host, _ := doc.Find(`input[name="host"]`).Attr("value")
+	assert.Equal(t, host, getLDAPServerHost())
+	binddn, _ := doc.Find(`input[name="bind_dn"]`).Attr("value")
+	assert.Equal(t, binddn, "uid=gitea,ou=service,dc=planetexpress,dc=com")
+
+	req = NewRequestWithValues(t, "POST", href, map[string]string{
+		"_csrf":                    csrf,
+		"type":                     "2",
+		"name":                     "ldap",
+		"host":                     getLDAPServerHost(),
+		"port":                     "389",
+		"bind_dn":                  "uid=gitea,ou=service,dc=planetexpress,dc=com",
+		"bind_password":            "password",
+		"user_base":                "ou=people,dc=planetexpress,dc=com",
+		"filter":                   "(&(objectClass=inetOrgPerson)(memberOf=cn=git,ou=people,dc=planetexpress,dc=com)(uid=%s))",
+		"admin_filter":             "(memberOf=cn=admin_staff,ou=people,dc=planetexpress,dc=com)",
+		"restricted_filter":        "(uid=leela)",
+		"attribute_username":       "uid",
+		"attribute_name":           "givenName",
+		"attribute_surname":        "sn",
+		"attribute_mail":           "mail",
+		"attribute_ssh_public_key": "",
+		"is_sync_enabled":          "on",
+		"is_active":                "on",
+	})
+	session.MakeRequest(t, req, http.StatusFound)
+
+	req = NewRequest(t, "GET", href)
+	resp = session.MakeRequest(t, req, http.StatusOK)
+	doc = NewHTMLParser(t, resp.Body)
+	host, _ = doc.Find(`input[name="host"]`).Attr("value")
+	assert.Equal(t, host, getLDAPServerHost())
+	binddn, _ = doc.Find(`input[name="bind_dn"]`).Attr("value")
+	assert.Equal(t, binddn, "uid=gitea,ou=service,dc=planetexpress,dc=com")
+}
+
 func TestLDAPUserSync(t *testing.T) {
 	if skipLDAPTests() {
 		t.Skip()

@@ -7,6 +7,7 @@ package models
 
 import (
 	"crypto/tls"
+	"encoding/binary"
 	"errors"
 	"fmt"
 	"net/smtp"

@@ -70,11 +71,30 @@ var (
 	_ convert.Conversion = &SSPIConfig{}
 )
 
-// jsonUnmarshalIgnoreErroneousBOM - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's
-// possible that a Blob may gain an unwanted prefix of 0xff 0xfe.
-func jsonUnmarshalIgnoreErroneousBOM(bs []byte, v interface{}) error {
+// jsonUnmarshalHandleDoubleEncode - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's
+// possible that a Blob may be double encoded or gain an unwanted prefix of 0xff 0xfe.
+func jsonUnmarshalHandleDoubleEncode(bs []byte, v interface{}) error {
 	json := jsoniter.ConfigCompatibleWithStandardLibrary
 	err := json.Unmarshal(bs, v)
+	if err != nil {
+		ok := true
+		rs := []byte{}
+		temp := make([]byte, 2)
+		for _, rn := range string(bs) {
+			if rn > 0xffff {
+				ok = false
+				break
+			}
+			binary.LittleEndian.PutUint16(temp, uint16(rn))
+			rs = append(rs, temp...)
+		}
+		if ok {
+			if rs[0] == 0xff && rs[1] == 0xfe {
+				rs = rs[2:]
+			}
+			err = json.Unmarshal(rs, v)
+		}
+	}
 	if err != nil && len(bs) > 2 && bs[0] == 0xff && bs[1] == 0xfe {
 		err = json.Unmarshal(bs[2:], v)
 	}
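
A hypothetical unit test, not part of this backport, could exercise the new wrapper directly: it rebuilds a double-encoded blob in the same way as the sketch near the top of this page and checks that jsonUnmarshalHandleDoubleEncode still recovers the JSON. The test name, the placement in the models package, and the sample JSON are assumptions.

package models

import (
	"encoding/binary"
	"testing"
	"unicode/utf16"

	"github.com/stretchr/testify/assert"
)

// TestJSONUnmarshalHandleDoubleEncode is a sketch only: it simulates the
// assumed corruption (0xff 0xfe prefix, bytes read as UTF-16LE code units,
// result stored back as a UTF-8 string) and expects the wrapper to undo it.
func TestJSONUnmarshalHandleDoubleEncode(t *testing.T) {
	original := []byte(`{"Host":"localhost"}`) // even length, so it splits cleanly into 16-bit units
	prefixed := append([]byte{0xff, 0xfe}, original...)
	units := make([]uint16, 0, len(prefixed)/2)
	for i := 0; i+1 < len(prefixed); i += 2 {
		units = append(units, binary.LittleEndian.Uint16(prefixed[i:i+2]))
	}
	doubleEncoded := []byte(string(utf16.Decode(units)))

	var got map[string]interface{}
	assert.NoError(t, jsonUnmarshalHandleDoubleEncode(doubleEncoded, &got))
	assert.Equal(t, "localhost", got["Host"])
}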

@@ -88,7 +108,7 @@ type LDAPConfig struct {
 
 // FromDB fills up a LDAPConfig from serialized format.
 func (cfg *LDAPConfig) FromDB(bs []byte) error {
-	err := jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
+	err := jsonUnmarshalHandleDoubleEncode(bs, &cfg)
 	if err != nil {
 		return err
 	}

@@ -129,7 +149,7 @@ type SMTPConfig struct {
 
 // FromDB fills up an SMTPConfig from serialized format.
 func (cfg *SMTPConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, cfg)
 }
 
 // ToDB exports an SMTPConfig to a serialized format.

@@ -146,7 +166,7 @@ type PAMConfig struct {
 
 // FromDB fills up a PAMConfig from serialized format.
 func (cfg *PAMConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, cfg)
 }
 
 // ToDB exports a PAMConfig to a serialized format.

@@ -167,7 +187,7 @@ type OAuth2Config struct {
 
 // FromDB fills up an OAuth2Config from serialized format.
 func (cfg *OAuth2Config) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, cfg)
 }
 
 // ToDB exports an SMTPConfig to a serialized format.

@@ -187,7 +207,7 @@ type SSPIConfig struct {
 
 // FromDB fills up an SSPIConfig from serialized format.
 func (cfg *SSPIConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, cfg)
 }
 
 // ToDB exports an SSPIConfig to a serialized format.

@@ -28,7 +28,7 @@ type UnitConfig struct{}
 
 // FromDB fills up a UnitConfig from serialized format.
 func (cfg *UnitConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
 }
 
 // ToDB exports a UnitConfig to a serialized format.

@@ -44,7 +44,7 @@ type ExternalWikiConfig struct {
 
 // FromDB fills up a ExternalWikiConfig from serialized format.
 func (cfg *ExternalWikiConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
 }
 
 // ToDB exports a ExternalWikiConfig to a serialized format.

@@ -62,7 +62,7 @@ type ExternalTrackerConfig struct {
 
 // FromDB fills up a ExternalTrackerConfig from serialized format.
 func (cfg *ExternalTrackerConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
 }
 
 // ToDB exports a ExternalTrackerConfig to a serialized format.

@@ -80,7 +80,7 @@ type IssuesConfig struct {
 
 // FromDB fills up a IssuesConfig from serialized format.
 func (cfg *IssuesConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
 }
 
 // ToDB exports a IssuesConfig to a serialized format.

@@ -104,7 +104,7 @@ type PullRequestsConfig struct {
 
 // FromDB fills up a PullRequestsConfig from serialized format.
 func (cfg *PullRequestsConfig) FromDB(bs []byte) error {
-	return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
+	return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
 }
 
 // ToDB exports a PullRequestsConfig to a serialized format.