package flate

import "fmt"

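// fastEncL5 is the level 5 encoder. It extends fastGen with a short (4-byte)
// hash table and a long (7-byte) hash table that keeps both the current and
// the previous entry per bucket.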
type fastEncL5 struct {
	fastGen
	table  [tableSize]tableEntry
	bTable [tableSize]tableEntryPrev
}

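// Encode performs a greedy match search over src, writing literal and match
// tokens to dst. Each position is probed in both hash tables; long-table
// candidates are preferred, with the short-table candidate as a fallback.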
func (e *fastEncL5) Encode(dst *tokens, src []byte) {
	const (
		inputMargin            = 12 - 1
		minNonLiteralBlockSize = 1 + 1 + inputMargin
		hashShortBytes         = 4
	)
	if debugDeflate && e.cur < 0 {
		panic(fmt.Sprint("e.cur < 0: ", e.cur))
	}

	// Protect against e.cur wraparound.
	for e.cur >= bufferReset {
		if len(e.hist) == 0 {
			for i := range e.table[:] {
				e.table[i] = tableEntry{}
			}
			for i := range e.bTable[:] {
				e.bTable[i] = tableEntryPrev{}
			}
			e.cur = maxMatchOffset
			break
		}
		// Shift down everything in the table that isn't already too far away.
		minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
		for i := range e.table[:] {
			v := e.table[i].offset
			if v <= minOff {
				v = 0
			} else {
				v = v - e.cur + maxMatchOffset
			}
			e.table[i].offset = v
		}
		for i := range e.bTable[:] {
			v := e.bTable[i]
			if v.Cur.offset <= minOff {
				v.Cur.offset = 0
				v.Prev.offset = 0
			} else {
				v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset
				if v.Prev.offset <= minOff {
					v.Prev.offset = 0
				} else {
					v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset
				}
			}
			e.bTable[i] = v
		}
		e.cur = maxMatchOffset
	}

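	// Append src to the history buffer; s is the offset in e.hist where it starts.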
	s := e.addBlock(src)

	// This check isn't in the Snappy implementation, but there, the caller
	// instead of the callee handles this case.
	if len(src) < minNonLiteralBlockSize {
		// We do not fill the token table.
		// This will be picked up by caller.
		dst.n = uint16(len(src))
		return
	}

	// Override src
	src = e.hist
	nextEmit := s

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	sLimit := int32(len(src) - inputMargin)

	// nextEmit is where in src the next emitLiteral should start from.
	cv := load6432(src, s)
	for {
		const skipLog = 6
		const doEvery = 1

		nextS := s
		var l int32
		var t int32
		for {
			nextHashS := hashLen(cv, tableBits, hashShortBytes)
			nextHashL := hash7(cv, tableBits)

			s = nextS
			nextS = s + doEvery + (s-nextEmit)>>skipLog
			if nextS > sLimit {
				goto emitRemainder
			}
			// Fetch a short+long candidate
			sCandidate := e.table[nextHashS]
			lCandidate := e.bTable[nextHashL]
			next := load6432(src, nextS)
			entry := tableEntry{offset: s + e.cur}
			e.table[nextHashS] = entry
			eLong := &e.bTable[nextHashL]
			eLong.Cur, eLong.Prev = entry, eLong.Cur

			nextHashS = hashLen(next, tableBits, hashShortBytes)
			nextHashL = hash7(next, tableBits)

			t = lCandidate.Cur.offset - e.cur
			if s-t < maxMatchOffset {
				if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) {
					// Store the next match
					e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
					eLong := &e.bTable[nextHashL]
					eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur

					t2 := lCandidate.Prev.offset - e.cur
					if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
						l = e.matchlen(s+4, t+4, src) + 4
						ml1 := e.matchlen(s+4, t2+4, src) + 4
						if ml1 > l {
							t = t2
							l = ml1
							break
						}
					}
					break
				}
				t = lCandidate.Prev.offset - e.cur
				if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
					// Store the next match
					e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
					eLong := &e.bTable[nextHashL]
					eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
					break
				}
			}

			t = sCandidate.offset - e.cur
			if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
				// Found a 4 byte match...
				l = e.matchlen(s+4, t+4, src) + 4
				lCandidate = e.bTable[nextHashL]
				// Store the next match

				e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
				eLong := &e.bTable[nextHashL]
				eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur

				// If the next long is a candidate, use that...
				t2 := lCandidate.Cur.offset - e.cur
				if nextS-t2 < maxMatchOffset {
					if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) {
						ml := e.matchlen(nextS+4, t2+4, src) + 4
						if ml > l {
							t = t2
							s = nextS
							l = ml
							break
						}
					}
					// If the previous long is a candidate, use that...
					t2 = lCandidate.Prev.offset - e.cur
					if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) {
						ml := e.matchlen(nextS+4, t2+4, src) + 4
						if ml > l {
							t = t2
							s = nextS
							l = ml
							break
						}
					}
				}
				break
			}
			cv = next
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

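		// Note: l is non-zero only when the candidate search above already
		// measured the match. matchlen caps its result, so l == maxMatchLength
		// means the match may extend further.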
		if l == 0 {
			// Extend the 4-byte match as long as possible.
			l = e.matchlenLong(s+4, t+4, src) + 4
		} else if l == maxMatchLength {
			l += e.matchlenLong(s+l, t+l, src)
		}

		// Try to locate a better match by checking the end of best match...
		if sAt := s + l; l < 30 && sAt < sLimit {
			// Allow some bytes at the beginning to mismatch.
			// Sweet spot is 2/3 bytes depending on input.
			// Skipping 3 is only a little better when it helps, but sometimes a lot worse.
			// The skipped bytes are tested in Extend backwards,
			// and still picked up as part of the match if they do.
			const skipBeginning = 2
			eLong := e.bTable[hash7(load6432(src, sAt), tableBits)].Cur.offset
			t2 := eLong - e.cur - l + skipBeginning
			s2 := s + skipBeginning
			off := s2 - t2
			if t2 >= 0 && off < maxMatchOffset && off > 0 {
				if l2 := e.matchlenLong(s2, t2, src); l2 > l {
					t = t2
					l = l2
					s = s2
				}
			}
		}

		// Extend backwards
		for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
			s--
			t--
			l++
		}
		if nextEmit < s {
			if false {
				emitLiteral(dst, src[nextEmit:s])
			} else {
				for _, v := range src[nextEmit:s] {
					dst.tokens[dst.n] = token(v)
					dst.litHist[v]++
					dst.n++
				}
			}
		}
		if debugDeflate {
			if t >= s {
				panic(fmt.Sprintln("s-t", s, t))
			}
			if (s - t) > maxMatchOffset {
				panic(fmt.Sprintln("mmo", s-t))
			}
			if l < baseMatchLength {
				panic("bml")
			}
		}

		dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
		s += l
		nextEmit = s
		if nextS >= s {
			s = nextS + 1
		}

		if s >= sLimit {
			goto emitRemainder
		}

		// Store every 3rd hash in-between.
		if true {
			const hashEvery = 3
			i := s - l + 1
			if i < s-1 {
				cv := load6432(src, i)
				t := tableEntry{offset: i + e.cur}
				e.table[hashLen(cv, tableBits, hashShortBytes)] = t
				eLong := &e.bTable[hash7(cv, tableBits)]
				eLong.Cur, eLong.Prev = t, eLong.Cur

				// Do a long at i+1
				cv >>= 8
				t = tableEntry{offset: t.offset + 1}
				eLong = &e.bTable[hash7(cv, tableBits)]
				eLong.Cur, eLong.Prev = t, eLong.Cur

				// We only have enough bits for a short entry at i+2
				cv >>= 8
				t = tableEntry{offset: t.offset + 1}
				e.table[hashLen(cv, tableBits, hashShortBytes)] = t

				// Skip one - otherwise we risk hitting 's'
				i += 4
				for ; i < s-1; i += hashEvery {
					cv := load6432(src, i)
					t := tableEntry{offset: i + e.cur}
					t2 := tableEntry{offset: t.offset + 1}
					eLong := &e.bTable[hash7(cv, tableBits)]
					eLong.Cur, eLong.Prev = t, eLong.Cur
					e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2
				}
			}
		}

		// We could immediately start working at s now, but to improve
		// compression we first update the hash table at s-1 and at s.
		x := load6432(src, s-1)
		o := e.cur + s - 1
		prevHashS := hashLen(x, tableBits, hashShortBytes)
		prevHashL := hash7(x, tableBits)
		e.table[prevHashS] = tableEntry{offset: o}
		eLong := &e.bTable[prevHashL]
		eLong.Cur, eLong.Prev = tableEntry{offset: o}, eLong.Cur
		cv = x >> 8
	}

emitRemainder:
	if int(nextEmit) < len(src) {
		// If nothing was added, don't encode literals.
		if dst.n == 0 {
			return
		}

		emitLiteral(dst, src[nextEmit:])
	}
}

// fastEncL5Window is a level 5 encoder,
// but with a custom window size.
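//
// A minimal usage sketch (the 32 KiB window below is an assumption for
// illustration, not a value taken from this file):
//
//	enc := &fastEncL5Window{maxOffset: 32 << 10}
//	var dst tokens
//	enc.Encode(&dst, input)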
type fastEncL5Window struct {
	hist      []byte
	cur       int32
	maxOffset int32
	table     [tableSize]tableEntry
	bTable    [tableSize]tableEntryPrev
}

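// Encode mirrors fastEncL5.Encode, but bounds matches by e.maxOffset: the
// local maxMatchOffset below shadows the package-level constant.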
func (e *fastEncL5Window) Encode(dst *tokens, src []byte) {
	const (
		inputMargin            = 12 - 1
		minNonLiteralBlockSize = 1 + 1 + inputMargin
		hashShortBytes         = 4
	)
	maxMatchOffset := e.maxOffset
	if debugDeflate && e.cur < 0 {
		panic(fmt.Sprint("e.cur < 0: ", e.cur))
	}

	// Protect against e.cur wraparound.
	for e.cur >= bufferReset {
		if len(e.hist) == 0 {
			for i := range e.table[:] {
				e.table[i] = tableEntry{}
			}
			for i := range e.bTable[:] {
				e.bTable[i] = tableEntryPrev{}
			}
			e.cur = maxMatchOffset
			break
		}
		// Shift down everything in the table that isn't already too far away.
		minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
		for i := range e.table[:] {
			v := e.table[i].offset
			if v <= minOff {
				v = 0
			} else {
				v = v - e.cur + maxMatchOffset
			}
			e.table[i].offset = v
		}
		for i := range e.bTable[:] {
			v := e.bTable[i]
			if v.Cur.offset <= minOff {
				v.Cur.offset = 0
				v.Prev.offset = 0
			} else {
				v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset
				if v.Prev.offset <= minOff {
					v.Prev.offset = 0
				} else {
					v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset
				}
			}
			e.bTable[i] = v
		}
		e.cur = maxMatchOffset
	}

	s := e.addBlock(src)

	// This check isn't in the Snappy implementation, but there, the caller
	// instead of the callee handles this case.
	if len(src) < minNonLiteralBlockSize {
		// We do not fill the token table.
		// This will be picked up by caller.
		dst.n = uint16(len(src))
		return
	}

	// Override src
	src = e.hist
	nextEmit := s

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	sLimit := int32(len(src) - inputMargin)

	// nextEmit is where in src the next emitLiteral should start from.
	cv := load6432(src, s)
	for {
		const skipLog = 6
		const doEvery = 1

		nextS := s
		var l int32
		var t int32
		for {
			nextHashS := hashLen(cv, tableBits, hashShortBytes)
			nextHashL := hash7(cv, tableBits)

			s = nextS
			nextS = s + doEvery + (s-nextEmit)>>skipLog
			if nextS > sLimit {
				goto emitRemainder
			}
			// Fetch a short+long candidate
			sCandidate := e.table[nextHashS]
			lCandidate := e.bTable[nextHashL]
			next := load6432(src, nextS)
			entry := tableEntry{offset: s + e.cur}
			e.table[nextHashS] = entry
			eLong := &e.bTable[nextHashL]
			eLong.Cur, eLong.Prev = entry, eLong.Cur

			nextHashS = hashLen(next, tableBits, hashShortBytes)
			nextHashL = hash7(next, tableBits)

			t = lCandidate.Cur.offset - e.cur
			if s-t < maxMatchOffset {
				if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) {
					// Store the next match
					e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
					eLong := &e.bTable[nextHashL]
					eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur

					t2 := lCandidate.Prev.offset - e.cur
					if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
						l = e.matchlen(s+4, t+4, src) + 4
						ml1 := e.matchlen(s+4, t2+4, src) + 4
						if ml1 > l {
							t = t2
							l = ml1
							break
						}
					}
					break
				}
				t = lCandidate.Prev.offset - e.cur
				if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
					// Store the next match
					e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
					eLong := &e.bTable[nextHashL]
					eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
					break
				}
			}

			t = sCandidate.offset - e.cur
			if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
				// Found a 4 byte match...
				l = e.matchlen(s+4, t+4, src) + 4
				lCandidate = e.bTable[nextHashL]
				// Store the next match

				e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
				eLong := &e.bTable[nextHashL]
				eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur

				// If the next long is a candidate, use that...
				t2 := lCandidate.Cur.offset - e.cur
				if nextS-t2 < maxMatchOffset {
					if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) {
						ml := e.matchlen(nextS+4, t2+4, src) + 4
						if ml > l {
							t = t2
							s = nextS
							l = ml
							break
						}
					}
					// If the previous long is a candidate, use that...
					t2 = lCandidate.Prev.offset - e.cur
					if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) {
						ml := e.matchlen(nextS+4, t2+4, src) + 4
						if ml > l {
							t = t2
							s = nextS
							l = ml
							break
						}
					}
				}
				break
			}
			cv = next
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		if l == 0 {
			// Extend the 4-byte match as long as possible.
			l = e.matchlenLong(s+4, t+4, src) + 4
		} else if l == maxMatchLength {
			l += e.matchlenLong(s+l, t+l, src)
		}

		// Try to locate a better match by checking the end of best match...
		if sAt := s + l; l < 30 && sAt < sLimit {
			// Allow some bytes at the beginning to mismatch.
			// Sweet spot is 2/3 bytes depending on input.
			// Skipping 3 is only a little better when it helps, but sometimes a lot worse.
			// The skipped bytes are tested in Extend backwards,
			// and still picked up as part of the match if they do.
			const skipBeginning = 2
			eLong := e.bTable[hash7(load6432(src, sAt), tableBits)].Cur.offset
			t2 := eLong - e.cur - l + skipBeginning
			s2 := s + skipBeginning
			off := s2 - t2
			if t2 >= 0 && off < maxMatchOffset && off > 0 {
				if l2 := e.matchlenLong(s2, t2, src); l2 > l {
					t = t2
					l = l2
					s = s2
				}
			}
		}

		// Extend backwards
		for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
			s--
			t--
			l++
		}
		if nextEmit < s {
			if false {
				emitLiteral(dst, src[nextEmit:s])
			} else {
				for _, v := range src[nextEmit:s] {
					dst.tokens[dst.n] = token(v)
					dst.litHist[v]++
					dst.n++
				}
			}
		}
		if debugDeflate {
			if t >= s {
				panic(fmt.Sprintln("s-t", s, t))
			}
			if (s - t) > maxMatchOffset {
				panic(fmt.Sprintln("mmo", s-t))
			}
			if l < baseMatchLength {
				panic("bml")
			}
		}

		dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
		s += l
		nextEmit = s
		if nextS >= s {
			s = nextS + 1
		}

		if s >= sLimit {
			goto emitRemainder
		}

		// Store every 3rd hash in-between.
		if true {
			const hashEvery = 3
			i := s - l + 1
			if i < s-1 {
				cv := load6432(src, i)
				t := tableEntry{offset: i + e.cur}
				e.table[hashLen(cv, tableBits, hashShortBytes)] = t
				eLong := &e.bTable[hash7(cv, tableBits)]
				eLong.Cur, eLong.Prev = t, eLong.Cur

				// Do a long at i+1
				cv >>= 8
				t = tableEntry{offset: t.offset + 1}
				eLong = &e.bTable[hash7(cv, tableBits)]
				eLong.Cur, eLong.Prev = t, eLong.Cur

				// We only have enough bits for a short entry at i+2
				cv >>= 8
				t = tableEntry{offset: t.offset + 1}
				e.table[hashLen(cv, tableBits, hashShortBytes)] = t

				// Skip one - otherwise we risk hitting 's'
				i += 4
				for ; i < s-1; i += hashEvery {
					cv := load6432(src, i)
					t := tableEntry{offset: i + e.cur}
					t2 := tableEntry{offset: t.offset + 1}
					eLong := &e.bTable[hash7(cv, tableBits)]
					eLong.Cur, eLong.Prev = t, eLong.Cur
					e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2
				}
			}
		}

		// We could immediately start working at s now, but to improve
		// compression we first update the hash table at s-1 and at s.
		x := load6432(src, s-1)
		o := e.cur + s - 1
		prevHashS := hashLen(x, tableBits, hashShortBytes)
		prevHashL := hash7(x, tableBits)
		e.table[prevHashS] = tableEntry{offset: o}
		eLong := &e.bTable[prevHashL]
		eLong.Cur, eLong.Prev = tableEntry{offset: o}, eLong.Cur
		cv = x >> 8
	}

emitRemainder:
	if int(nextEmit) < len(src) {
		// If nothing was added, don't encode literals.
		if dst.n == 0 {
			return
		}

		emitLiteral(dst, src[nextEmit:])
	}
}

// Reset the encoding table.
func (e *fastEncL5Window) Reset() {
	// We keep the same allocs, since we are compressing the same block sizes.
	if cap(e.hist) < allocHistory {
		e.hist = make([]byte, 0, allocHistory)
	}

	// We offset current position so everything will be out of reach.
	// If we are above the buffer reset it will be cleared anyway since len(hist) == 0.
	if e.cur <= int32(bufferReset) {
		e.cur += e.maxOffset + int32(len(e.hist))
	}
	e.hist = e.hist[:0]
}

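// addBlock appends src to the history buffer. When the buffer would grow past
// its capacity, the last maxOffset bytes are moved down and e.cur is advanced
// so existing table offsets remain valid. It returns the offset in e.hist
// where src begins.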
func (e *fastEncL5Window) addBlock(src []byte) int32 {
	// check if we have space already
	maxMatchOffset := e.maxOffset

	if len(e.hist)+len(src) > cap(e.hist) {
		if cap(e.hist) == 0 {
			e.hist = make([]byte, 0, allocHistory)
		} else {
			if cap(e.hist) < int(maxMatchOffset*2) {
				panic("unexpected buffer size")
			}
			// Move down
			offset := int32(len(e.hist)) - maxMatchOffset
			copy(e.hist[0:maxMatchOffset], e.hist[offset:])
			e.cur += offset
			e.hist = e.hist[:maxMatchOffset]
		}
	}
	s := int32(len(e.hist))
	e.hist = append(e.hist, src...)
	return s
}

// matchlen will return the match length between offsets s and t in src.
// The maximum length returned is maxMatchLength - 4.
// It is assumed that s > t, that t >= 0 and that s < len(src).
func (e *fastEncL5Window) matchlen(s, t int32, src []byte) int32 {
	if debugDecode {
		if t >= s {
			panic(fmt.Sprint("t >= s:", t, s))
		}
		if int(s) >= len(src) {
			panic(fmt.Sprint("s >= len(src):", s, len(src)))
		}
		if t < 0 {
			panic(fmt.Sprint("t < 0:", t))
		}
		if s-t > e.maxOffset {
			panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchOffset (", e.maxOffset, ")"))
		}
	}
	s1 := int(s) + maxMatchLength - 4
	if s1 > len(src) {
		s1 = len(src)
	}

	// Extend the match to be as long as possible.
	return int32(matchLen(src[t:], src[s:s1]))
}

// matchlenLong will return the match length between offsets s and t in src.
// It is assumed that s > t, that t >= 0 and that s < len(src).
func (e *fastEncL5Window) matchlenLong(s, t int32, src []byte) int32 {
	if debugDeflate {
		if t >= s {
			panic(fmt.Sprint("t >= s:", t, s))
		}
		if int(s) >= len(src) {
			panic(fmt.Sprint("s >= len(src):", s, len(src)))
		}
		if t < 0 {
			panic(fmt.Sprint("t < 0:", t))
		}
		if s-t > e.maxOffset {
			panic(fmt.Sprint(s, "-", t, "(", s-t, ") > maxMatchOffset (", e.maxOffset, ")"))
		}
	}
	// Extend the match to be as long as possible.
	return int32(matchLen(src[t:], src[s:]))
}