Fix data URI scramble (#16098)
* Removed unused method.
* No prefix for data URIs.
* Added test to prevent regressions.
parent 0909695204
commit 21cde5c439

4 changed files with 23 additions and 19 deletions
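For context on the "scramble": the old `visitNode` code pushed every `img` `src` that was not recognized as an absolute link through the repository URL-prefixing path. A data URI fails that link check without being a relative path, and join helpers of this kind typically path-clean their arguments, which corrupts the base64 payload. A minimal, stdlib-only sketch of that failure mode (an illustration of the effect, not Gitea's `util.URLJoin` itself):

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	// A data URI whose base64 payload happens to contain "//",
	// like the fixture used in the regression test below.
	src := "data:image/png;base64,i//V"

	// Joining it onto a prefix as if it were a path segment cleans the
	// result, so the "//" inside the payload collapses to a single "/".
	fmt.Println(path.Join("https://example.com", src))
	// Prints: https:/example.com/data:image/png;base64,i/V
}
```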
@@ -364,24 +364,19 @@ func visitNode(ctx *RenderContext, procs []processor, node *html.Node, visitText
 		}
 	case html.ElementNode:
 		if node.Data == "img" {
-			attrs := node.Attr
-			for idx, attr := range attrs {
+			for _, attr := range node.Attr {
 				if attr.Key != "src" {
 					continue
 				}
-				link := []byte(attr.Val)
-				if len(link) > 0 && !IsLink(link) {
+				if len(attr.Val) > 0 && !isLinkStr(attr.Val) && !strings.HasPrefix(attr.Val, "data:image/") {
 					prefix := ctx.URLPrefix
 					if ctx.IsWiki {
 						prefix = util.URLJoin(prefix, "wiki", "raw")
 					}
 					prefix = strings.Replace(prefix, "/src/", "/media/", 1)
 
-					lnk := string(link)
-					lnk = util.URLJoin(prefix, lnk)
-					link = []byte(lnk)
+					attr.Val = util.URLJoin(prefix, attr.Val)
 				}
-				node.Attr[idx].Val = string(link)
 			}
 		} else if node.Data == "a" {
 			visitText = false
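The rewritten condition makes the rule explicit: an `img` `src` is only joined onto the media prefix when it is non-empty, not already a link, and not a data URI. A hypothetical standalone version of that predicate (the function name is illustrative, and a plain scheme check stands in for `isLinkStr`):

```go
package sketch

import "strings"

// shouldPrefixImgSrc reports whether an <img> src value should be joined
// onto the repository's media URL prefix. This is a sketch of the condition
// used in visitNode above; the real code calls isLinkStr rather than the
// rough "://" check used here.
func shouldPrefixImgSrc(src string) bool {
	switch {
	case src == "":
		return false // nothing to rewrite
	case strings.HasPrefix(src, "data:image/"):
		return false // inline image data is passed through untouched
	case strings.Contains(src, "://"):
		return false // already an absolute link
	default:
		return true // relative path: rewrite under .../media/
	}
}
```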
@@ -444,3 +444,23 @@ func Test_ParseClusterFuzz(t *testing.T) {
 	assert.NoError(t, err)
 	assert.NotContains(t, res.String(), "<html")
 }
+
+func TestIssue16020(t *testing.T) {
+	setting.AppURL = AppURL
+	setting.AppSubURL = AppSubURL
+
+	var localMetas = map[string]string{
+		"user": "go-gitea",
+		"repo": "gitea",
+	}
+
+	data := `<img src="data:image/png;base64,i//V"/>`
+
+	var res strings.Builder
+	err := PostProcess(&RenderContext{
+		URLPrefix: "https://example.com",
+		Metas:     localMetas,
+	}, strings.NewReader(data), &res)
+	assert.NoError(t, err)
+	assert.Equal(t, data, res.String())
+}
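The fixture deliberately uses a payload containing `//` (`i//V`), which the pre-fix code corrupted, and the assertion requires the rendered output to be byte-for-byte identical to the input. If more data-URI shapes ever need coverage, the same round-trip pattern extends naturally; a sketch (not part of this commit, with made-up fixture values):

```go
func TestDataURIPassthrough(t *testing.T) {
	setting.AppURL = AppURL
	setting.AppSubURL = AppSubURL

	// Hypothetical extra fixtures: every data:image/... URI must survive
	// PostProcess unchanged, regardless of subtype or payload.
	for _, fixture := range []string{
		`<img src="data:image/png;base64,i//V"/>`,
		`<img src="data:image/gif;base64,R0lGODlhAQABAAAAACw="/>`,
		`<img src="data:image/svg+xml;base64,PHN2Zy8+"/>`,
	} {
		var res strings.Builder
		err := PostProcess(&RenderContext{
			URLPrefix: "https://example.com",
			Metas:     map[string]string{"user": "go-gitea", "repo": "gitea"},
		}, strings.NewReader(fixture), &res)
		assert.NoError(t, err)
		assert.Equal(t, fixture, res.String())
	}
}
```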
@@ -131,13 +131,3 @@ func SanitizeReader(r io.Reader) *bytes.Buffer {
 	NewSanitizer()
 	return sanitizer.policy.SanitizeReader(r)
 }
-
-// SanitizeBytes takes a []byte slice that contains a HTML fragment or document and applies policy whitelist.
-func SanitizeBytes(b []byte) []byte {
-	if len(b) == 0 {
-		// nothing to sanitize
-		return b
-	}
-	NewSanitizer()
-	return sanitizer.policy.SanitizeBytes(b)
-}
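SanitizeBytes had no callers left (the "removed unused method" part of this change), so it is deleted rather than reworked. Any future caller holding a `[]byte` can go through the surviving entry points instead; a sketch assuming it sits alongside the package's `Sanitize` and `SanitizeReader` functions shown above (the helper names are hypothetical):

```go
import "bytes"

// sanitizeHTMLBytes runs a []byte HTML fragment through the string-based
// entry point. Hypothetical helper, not part of this commit.
func sanitizeHTMLBytes(b []byte) []byte {
	return []byte(Sanitize(string(b)))
}

// sanitizeHTMLBytesViaReader does the same through the reader-based entry
// point, avoiding the intermediate string conversion.
func sanitizeHTMLBytesViaReader(b []byte) []byte {
	return SanitizeReader(bytes.NewReader(b)).Bytes()
}
```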
@@ -49,7 +49,6 @@ func Test_Sanitizer(t *testing.T) {
 
 	for i := 0; i < len(testCases); i += 2 {
 		assert.Equal(t, testCases[i+1], Sanitize(testCases[i]))
-		assert.Equal(t, testCases[i+1], string(SanitizeBytes([]byte(testCases[i]))))
 	}
 }
 
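With the SanitizeBytes assertion gone, the loop only exercises Sanitize. testCases stays a flat slice of (input, expected) pairs, which is why the index advances by two; a hypothetical extra case would be appended as such a pair (the values below are illustrative, not from this commit):

```go
// Raw HTML at an even index, its expected sanitized form right after it.
testCases = append(testCases,
	`<p>hi<script>alert(1)</script></p>`, // input
	`<p>hi</p>`,                          // expected: script element and body dropped
)
```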