Golang compress-bzip2.NewReader类(方法)实例源码

下面列出了Golang compress-bzip2.NewReader 类(方法)源码代码实例,从而了解它的用法。

作者:arduin    项目:arduino-create-agen   
// extractBz2 extracts a bzip2-compressed tar archive held in body into
// location, stripping the archive's common base directory (as determined by
// findBaseDir) from entry paths. It returns location and the first fatal
// error encountered.
func extractBz2(body []byte, location string) (string, error) {
	// First pass: collect all entry names so findBaseDir can determine
	// the common prefix to strip. bytes.NewReader does not consume body,
	// so no defensive copy is needed for the second pass.
	tarReader := tar.NewReader(bzip2.NewReader(bytes.NewReader(body)))

	var dirList []string
	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			// The original appended header.Name here even on error,
			// nil-dereferencing header for any non-EOF failure.
			return location, err
		}
		dirList = append(dirList, header.Name)
	}

	basedir := findBaseDir(dirList)

	// Second pass: re-read the archive from the start and extract entries.
	tarReader = tar.NewReader(bzip2.NewReader(bytes.NewReader(body)))

	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return location, err
		}

		path := filepath.Join(location, strings.Replace(header.Name, basedir, "", -1))
		info := header.FileInfo()

		if info.IsDir() {
			if err = os.MkdirAll(path, info.Mode()); err != nil {
				return location, err
			}
			continue
		}

		if header.Typeflag == tar.TypeSymlink {
			if err = os.Symlink(header.Linkname, path); err != nil {
				return location, err
			}
			continue
		}

		// Extract via a helper so the file is closed per entry instead of
		// piling up defers inside the loop as the original did.
		if err = writeTarEntry(path, info.Mode(), tarReader); err != nil {
			return location, err
		}
	}
	return location, nil
}

// writeTarEntry creates the file at path with the given mode, streams the
// current tar entry's contents from r into it, and closes it before
// returning the first error encountered.
func writeTarEntry(path string, mode os.FileMode, r io.Reader) error {
	file, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, mode)
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = io.Copy(file, r)
	return err
}

作者:bv    项目:gtv   
// tarFilesCount opens the tar archive at sourcefile (optionally gzip- or
// bzip2-compressed, chosen by extension), counts its entries, prints the
// count, and returns it. Errors are routed through checkErr.
func tarFilesCount(sourcefile string) int {
	flreader, err := os.Open(sourcefile)
	// The original ignored the Open error and would nil-deref below.
	checkErr("In tarFilesCounter - Open", err)
	defer flreader.Close()

	var fltarReader *tar.Reader
	switch {
	case strings.HasSuffix(sourcefile, ".gz"), strings.HasSuffix(sourcefile, ".tgz"):
		flgzipReader, err := gzip.NewReader(flreader)
		checkErr("In tarFilesCounter - NewReader", err)
		// Close the gzip reader itself; the original deferred a second
		// Close on the underlying file instead (flReader aliased flreader)
		// and never released the gzip state.
		defer flgzipReader.Close()
		fltarReader = tar.NewReader(flgzipReader)
	case strings.HasSuffix(sourcefile, ".bz2"):
		// bzip2 readers have no Close method.
		fltarReader = tar.NewReader(bzip2.NewReader(flreader))
	default:
		fltarReader = tar.NewReader(flreader)
	}

	counter := 0
	for {
		_, err := fltarReader.Next()
		if err != nil {
			if err == io.EOF {
				break
			}
			checkErr("Extract error::ReadTarArchive", err)
		}
		counter++
	}
	fmt.Println("Files in archive -", counter)
	return counter
}

作者:y0k0ta1    项目:golang_trainin   
// TestBzip2 round-trips a highly repetitive message through the bzip
// writer and the standard library's bzip2 reader, checking both the
// expected compressed size and that decompression is lossless.
func TestBzip2(t *testing.T) {
	var compressed, uncompressed bytes.Buffer
	w := bzip.NewWriter(&compressed)

	// Stream the message in a million small writes, mirroring every byte
	// into an uncompressed reference buffer.
	tee := io.MultiWriter(w, &uncompressed)
	const pieces = 1000000
	for n := 0; n < pieces; n++ {
		io.WriteString(tee, "hello")
	}
	if err := w.Close(); err != nil {
		t.Fatal(err)
	}

	// The repetitive input should compress to exactly 255 bytes.
	if got, want := compressed.Len(), 255; got != want {
		t.Errorf("1 million hellos compressed to %d bytes, want %d", got, want)
	}

	// Decompress and verify the round trip restored the original bytes.
	var decompressed bytes.Buffer
	io.Copy(&decompressed, bzip2.NewReader(&compressed))
	if !bytes.Equal(uncompressed.Bytes(), decompressed.Bytes()) {
		t.Error("decompression yielded a different message")
	}
}

作者:pombredann    项目:snappy-   
// skipToArMember advances arReader to the first member whose name starts
// with memberPrefix and returns a reader that decompresses that member
// according to its filename extension (.gz, .bz2 or .xz).
func skipToArMember(arReader *ar.Reader, memberPrefix string) (io.Reader, error) {
	// Walk the archive until a matching member is found; Next returns an
	// error (including io.EOF) if none exists.
	var name string
	for {
		header, err := arReader.Next()
		if err != nil {
			return nil, err
		}
		if strings.HasPrefix(header.Name, memberPrefix) {
			name = header.Name
			break
		}
	}

	// Pick a decompressor from the member's extension.
	switch {
	case strings.HasSuffix(name, ".gz"):
		gz, err := gzip.NewReader(arReader)
		if err != nil {
			return nil, err
		}
		return gz, nil
	case strings.HasSuffix(name, ".bz2"):
		return bzip2.NewReader(arReader), nil
	case strings.HasSuffix(name, ".xz"):
		return xzPipeReader(arReader), nil
	}

	return nil, fmt.Errorf("Can not handle %s", name)
}

作者:JustinAzof    项目:flow-indexe   
// OpenDecompress opens fn and returns a reader that transparently
// decompresses its contents based on the filename extension. Closing the
// returned ReadCloser releases the underlying file.
func OpenDecompress(fn string) (r io.ReadCloser, err error) {
	f, err := os.Open(fn)
	if err != nil {
		return nil, err
	}

	ext := filepath.Ext(fn)

	switch ext {
	case ".log", ".txt":
		// Plain text: hand the file back directly.
		return f, nil
	case ".gz":
		gzr, err := gzip.NewReader(f)
		if err != nil {
			// The original leaked f here and returned a wrapper holding a
			// nil ReadCloser alongside the error.
			f.Close()
			return nil, err
		}
		return &WrappedDecompressor{
			ReadCloser: gzr,
			wrapped:    f,
		}, nil
	case ".bz2":
		// bzip2 readers have no Close, so add a no-op closer; the wrapper
		// still closes the underlying file.
		bzr := bzip2.NewReader(f)
		return &WrappedDecompressor{
			ReadCloser: ioutil.NopCloser(bzr),
			wrapped:    f,
		}, nil
	default:
		// Unknown extension: assume uncompressed.
		return f, nil
	}
}

作者:AlphaStaxLL    项目:rocke   
// maybeDecompress sniffs the content type of rs and returns a reader that
// yields the uncompressed bytes: gzip/bzip2/xz streams are wrapped in the
// matching decompressor, plain tar is passed through unchanged.
func maybeDecompress(rs io.ReadSeeker) (io.Reader, error) {
	// TODO(jonboulle): this is a bit redundant with detectValType
	typ, err := aci.DetectFileType(rs)
	if err != nil {
		return nil, err
	}
	// Detection consumed bytes; rewind to the start before decoding.
	if _, err := rs.Seek(0, 0); err != nil {
		return nil, err
	}
	switch typ {
	case aci.TypeGzip:
		gzr, err := gzip.NewReader(rs)
		if err != nil {
			return nil, fmt.Errorf("error reading gzip: %v", err)
		}
		return gzr, nil
	case aci.TypeBzip2:
		return bzip2.NewReader(rs), nil
	case aci.TypeXz:
		return aci.XzReader(rs), nil
	case aci.TypeTar:
		return rs, nil
	case aci.TypeUnknown:
		return nil, errors.New("unknown filetype")
	}
	// should never happen
	panic(fmt.Sprintf("bad type returned from DetectFileType: %v", typ))
}

作者:dusti    项目:go-apr   
// getSampleLines reads the bzip2-compressed text file at path and returns
// its lines, each retaining its trailing '\n' (except possibly the last).
// It panics if the file cannot be opened or read.
func getSampleLines(path string) []string {
	file, err := os.Open(path)
	if err != nil {
		panic("Could not open sample file: " + err.Error())
	}
	defer file.Close()

	bz := bzip2.NewReader(file)
	// Pre-size for the expected sample volume to limit append growth.
	rv := make([]string, 0, 250000)

	bio := bufio.NewReader(bz)
	done := false

	for !done {
		line, err := bio.ReadString('\n')
		switch err {
		case nil:
			rv = append(rv, line)
		case io.EOF:
			// ReadString returns any partial final line together with EOF;
			// the original silently dropped it.
			if len(line) > 0 {
				rv = append(rv, line)
			}
			done = true
		default:
			panic("Could not load samples: " + err.Error())
		}
	}

	return rv
}

作者:kippandre    项目:docke   
// DecompressStream wraps archive in a ReadCloser that decompresses it,
// choosing the algorithm by sniffing the stream's first 10 bytes with
// DetectCompression. The sniffed bytes are stitched back in front of the
// remaining stream so no data is lost.
func DecompressStream(archive io.Reader) (io.ReadCloser, error) {
	buf := make([]byte, 10)
	totalN := 0
	// Loop until the full 10-byte sniff buffer is filled; a stream that
	// ends sooner cannot be a valid archive.
	for totalN < 10 {
		n, err := archive.Read(buf[totalN:])
		if err != nil {
			if err == io.EOF {
				return nil, fmt.Errorf("Tarball too short")
			}
			return nil, err
		}
		totalN += n
		utils.Debugf("[tar autodetect] n: %d", n)
	}
	compression := DetectCompression(buf)
	// Re-prepend the consumed sniff bytes before the untouched remainder.
	wrap := io.MultiReader(bytes.NewReader(buf), archive)

	switch compression {
	case Uncompressed:
		return ioutil.NopCloser(wrap), nil
	case Gzip:
		return gzip.NewReader(wrap)
	case Bzip2:
		// bzip2 readers have no Close method, so add a no-op one.
		return ioutil.NopCloser(bzip2.NewReader(wrap)), nil
	case Xz:
		return xzDecompress(wrap)
	default:
		return nil, fmt.Errorf("Unsupported compression format %s", (&compression).Extension())
	}
}

作者:ddats    项目:docker-rebas   
// ls opens the tar.bz2 archive at path and returns the set of entry names
// it contains, with the image metadata entries "./" and "repositories"
// removed from the result.
func ls(path string) (map[string]bool, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	entries := make(map[string]bool)
	tr := tar.NewReader(bzip2.NewReader(f))
	for {
		hdr, err := tr.Next()
		if err != nil {
			if err == io.EOF {
				break
			}
			return nil, err
		}
		entries[hdr.Name] = true
	}

	// Drop the metadata entries so only real files remain.
	delete(entries, "./")
	delete(entries, "repositories")

	return entries, nil
}

作者:zebbra201    项目:dcr   
// loadBlocks reads the bzip2-compressed, gob-encoded test blockchain in
// file and returns the decoded blocks with heights assigned. Failures are
// reported through t and also returned as errors.
func loadBlocks(t *testing.T, file string) (blocks []*dcrutil.Block, err error) {
	fi, err := os.Open(file)
	if err != nil {
		t.Errorf("failed to open file %v, err %v", file, err)
		return nil, err
	}
	defer fi.Close()
	bcStream := bzip2.NewReader(fi)

	// Buffer the whole decompressed stream for the gob decoder; the
	// original ignored ReadFrom's error.
	bcBuf := new(bytes.Buffer)
	if _, err := bcBuf.ReadFrom(bcStream); err != nil {
		t.Errorf("failed to read test blockchain: %v", err)
		return nil, err
	}

	// Decode the height -> serialized-block map.
	bcDecoder := gob.NewDecoder(bcBuf)
	blockchain := make(map[int64][]byte)
	if err := bcDecoder.Decode(&blockchain); err != nil {
		// The original logged and pressed on with an empty map; a decode
		// failure makes the fixture useless, so bail out instead.
		t.Errorf("error decoding test blockchain")
		return nil, err
	}

	// Keys start at 1; each block is assigned height-1 to restore
	// zero-based heights.
	blocks = make([]*dcrutil.Block, 0, len(blockchain))
	for height := int64(1); height < int64(len(blockchain)); height++ {
		block, err := dcrutil.NewBlockFromBytes(blockchain[height])
		if err != nil {
			t.Errorf("failed to parse block %v", height)
			return nil, err
		}
		block.SetHeight(height - 1)
		blocks = append(blocks, block)
	}

	return
}

作者:AlphaStaxLL    项目:rocke   
// unpackRootfs unpacks a stage1 rootfs (compressed file, pointed to by rfs)
// into dir, returning any error encountered. The file's type is sniffed so
// gzip, bzip2, xz and plain tar images are all handled.
func unpackRootfs(rfs string, dir string) error {
	fh, err := os.Open(rfs)
	if err != nil {
		return fmt.Errorf("error opening stage1 rootfs: %v", err)
	}
	// The original never closed fh, leaking the descriptor.
	defer fh.Close()
	typ, err := aci.DetectFileType(fh)
	if err != nil {
		return fmt.Errorf("error detecting image type: %v", err)
	}
	// Type detection consumed bytes; rewind before decompressing.
	if _, err := fh.Seek(0, 0); err != nil {
		return fmt.Errorf("error seeking image: %v", err)
	}
	var r io.Reader
	switch typ {
	case aci.TypeGzip:
		r, err = gzip.NewReader(fh)
		if err != nil {
			return fmt.Errorf("error reading gzip: %v", err)
		}
	case aci.TypeBzip2:
		r = bzip2.NewReader(fh)
	case aci.TypeXz:
		r = aci.XzReader(fh)
	case aci.TypeTar:
		r = fh
	case aci.TypeUnknown:
		return fmt.Errorf("error: unknown image filetype")
	default:
		// should never happen
		panic("no type returned from DetectFileType?")
	}
	return untarRootfs(r, dir)
}

作者:lhchave    项目:quar   
// Open returns a ReadCloser over the benchmark case's payload: casefile
// holds base64-encoded, bzip2-compressed data, which is decoded and
// decompressed on the fly from an in-memory buffer.
func (b *benchmarkCase) Open() (io.ReadCloser, error) {
	buf := bytes.NewBufferString(b.casefile)
	return &benchmarkFile{
		Reader: bzip2.NewReader(base64.NewDecoder(base64.StdEncoding, buf)),
		buffer: buf,
	}, nil
}

作者:xia    项目:lumino   
// Attempts to extract the embedded, bzip2-compressed example project tarball
// (compressedProject) into the given destination directory. All failures are
// returned as errors; the original panicked on archive corruption and on
// unknown entry types even though the signature already returns an error.
func unpackExampleProject(root string) (err error) {
	var stat os.FileInfo

	// Validating destination.
	if stat, err = os.Stat(root); err != nil {
		return err
	}

	if stat.IsDir() == false {
		return fmt.Errorf("Expecting a directory.")
	}

	// Creating a tarbz2 reader.
	tbz := tar.NewReader(bzip2.NewReader(bytes.NewBuffer(compressedProject)))

	// Extracting tarred files.
	for {
		hdr, err := tbz.Next()
		if err != nil {
			if err == io.EOF {
				break
			}
			return err
		}

		// See http://en.wikipedia.org/wiki/Tar_(computing)
		filePath := root + pathSeparator + hdr.Name

		switch hdr.Typeflag {
		case '0':
			// Normal file
			if err := extractProjectFile(tbz, filePath, os.FileMode(hdr.Mode)); err != nil {
				return err
			}
		case '5':
			// Directory; the original ignored MkdirAll's error.
			if err := os.MkdirAll(filePath, os.FileMode(hdr.Mode)); err != nil {
				return err
			}
		default:
			return fmt.Errorf("Unhandled tar type: %c in file: %s", hdr.Typeflag, hdr.Name)
		}
	}

	return nil
}

// extractProjectFile writes the current tar entry from tbz to filePath and
// applies mode, closing the file even on the error paths (the original
// leaked the handle when Chmod failed and ignored io.Copy errors).
func extractProjectFile(tbz *tar.Reader, filePath string, mode os.FileMode) error {
	fp, err := os.Create(filePath)
	if err != nil {
		return err
	}
	defer fp.Close()

	if _, err := io.Copy(fp, tbz); err != nil {
		return err
	}

	return os.Chmod(filePath, mode)
}

作者:nickbruu    项目:gocbzip   
// Test creating a writer, writing a significant amount of data to it, and
// closing it again.
func TestWriter_Write12MiB(t *testing.T) {
	// Build a 3 MiB repeating byte pattern.
	stride := make([]byte, 3*1024*1024)
	for i := range stride {
		stride[i] = byte(i % 256)
	}

	// Exercise every legal bzip2 block size.
	for blockSize100k := 1; blockSize100k <= 9; blockSize100k++ {
		// Compress four copies of the stride (12 MiB total).
		compressed := AssertWriterWrites(t, blockSize100k, [][]byte{
			stride,
			stride,
			stride,
			stride,
		})

		// Round-trip through the standard library decompressor.
		decompressed, err := ioutil.ReadAll(bzip2.NewReader(bytes.NewBuffer(compressed)))
		if err != nil {
			t.Fatalf("Failed to decompress compressed bzip2 data: %v", err)
		}

		if len(decompressed) != 4*len(stride) {
			t.Fatalf("Expected decompressed data to be %d B but it is %d B", 4*len(stride), len(decompressed))
		}

		// Each quarter of the output must equal the original stride.
		for i := 0; i < 4; i++ {
			chunk := decompressed[i*len(stride) : (i+1)*len(stride)]
			if !bytes.Equal(chunk, stride) {
				t.Fatalf("Decompressed data does not match original data")
			}
		}
	}
}

作者:nathj0    项目:go-wikiparse
// main loads the bzip2-compressed wiki dump named by the -file flag and
// feeds its pages into the MongoDB database selected by the
// -dburl/-dbname/-collection flags, ensuring a title index first.
func main() {
	flag.Parse()
	if *file == "" {
		log.Fatal("You must supply a bz2 dump file.")
	}
	// Connect to MongoDB before touching the dump file.
	session, err := mgo.Dial(*dburl)
	if err != nil {
		panic(err)
	}

	f, err := os.Open(*file)
	if err != nil {
		log.Fatalf("Error opening file: %v", err)
	}
	defer f.Close()

	// Stream-decompress the dump; the parser consumes the raw XML.
	z := bzip2.NewReader(f)

	p, err := wikiparse.NewParser(z)
	if err != nil {
		log.Fatalf("Error setting up new page parser:  %v", err)
	}

	// NOTE(review): session is never closed; presumably the process exits
	// right after processDump — confirm before reusing this elsewhere.
	err = session.DB(*dbname).C(*collection).EnsureIndex(titleIndex)
	if err != nil {
		log.Fatal("Error creating title index", err)
	}
	processDump(p, session.DB(*dbname))
}

作者:fawic    项目:resti   
// SetupTarTestFixture extracts the tarFile to outputDir by streaming it
// (decompressed if needed) into an external `tar xf -` process.
func SetupTarTestFixture(t testing.TB, outputDir, tarFile string) {
	input, err := os.Open(tarFile)
	// Check the error before deferring Close: the original deferred first,
	// so a failed Open deferred Close on a nil file.
	OK(t, err)
	defer input.Close()

	// Choose a decompressor based on the file extension.
	var rd io.Reader
	switch filepath.Ext(tarFile) {
	case ".gz":
		r, err := gzip.NewReader(input)
		OK(t, err)

		defer r.Close()
		rd = r
	case ".bzip2":
		// bzip2 readers have no Close method.
		rd = bzip2.NewReader(input)
	default:
		rd = input
	}

	// Feed the archive to tar rooted at outputDir.
	cmd := exec.Command("tar", "xf", "-")
	cmd.Dir = outputDir

	cmd.Stdin = rd
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	OK(t, cmd.Run())
}

作者:natefinc    项目:pcgre   
// read scans the (possibly compressed) file at name line by line and logs
// every line matching reg. Errors are reported on errlog; wg is released on
// return.
func read(name string, wg *sync.WaitGroup, reg *regexp.Regexp) {
	defer wg.Done()
	f, err := os.Open(name)
	if err != nil {
		errlog.Println(err)
		return
	}
	defer f.Close()
	var r io.Reader
	ext := filepath.Ext(name)
	switch ext {
	case ".gz":
		gz, err := gzip.NewReader(f)
		if err != nil {
			// Report on errlog like every other error path; the original
			// inconsistently used the regular logger here.
			errlog.Println(err)
			return
		}
		// Release the decompressor's state when done (does not close f).
		defer gz.Close()
		r = gz
	case ".bz":
		r = bzip2.NewReader(f)
	default:
		errlog.Println("Unknown extension:", ext)
		return
	}
	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		b := scanner.Bytes()
		if reg.Match(b) {
			log.Printf("%s: %s", name, b)
		}
	}
	if err := scanner.Err(); err != nil {
		errlog.Printf("Error while reading %s: %s", name, err)
	}
}

作者:reedobrie    项目:go-wikiparse
// multiStreamWorker consumes index chunks from p.workerch; for each chunk it
// seeks the data stream to the chunk's offset (presumably the start of an
// independent bzip2 stream in a multistream dump — confirm against the index
// producer), decodes up to chunk.count pages and forwards them on p.entries.
func multiStreamWorker(src IndexedParseSource, wg *sync.WaitGroup,
	p *multiStreamParser) {
	defer wg.Done()

	r, err := src.OpenData()
	if err != nil {
		log.Fatalf("Error opening data: %v", err)
	}
	defer r.Close()

	for idxChunk := range p.workerch {
		_, err := r.Seek(idxChunk.offset, 0)
		if err != nil {
			log.Fatalf("Error seeking to specified offset: %v", err)
		}
		// A fresh bzip2 reader/XML decoder per chunk, starting at the
		// seeked position.
		bz := bzip2.NewReader(r)
		d := xml.NewDecoder(bz)

		// Decode up to count pages, stopping early once EOF is seen; other
		// decode errors skip that page but keep iterating.
		for i := 0; i < idxChunk.count && err != io.EOF; i++ {
			newpage := &Page{}
			err = d.Decode(newpage)
			if err == nil {
				p.entries <- newpage
			}
		}
	}
}

作者:aphisti    项目:go.Zamar   
// File returns the archive member named filename, locating it through the
// MPQ hash/block tables on first use and caching the result in mpq.files.
// It positions mpq.reader at the member's data and installs the matching
// fileReader (bzip2-wrapped or raw) for subsequent reads, and resets the
// per-file read counter.
func (mpq *Mpq) File(filename string) (file *File, err error) {
	// First see if the file is already in the map
	file, found := mpq.files[filename]
	if !found {
		// If it's not, try to load it
		fileHash, hashErr := mpq.getHashEntry(filename)
		if hashErr != nil {
			err = fmt.Errorf("Unable to find file: %v",
				filename)
			return
		}
		fileBlock := mpq.BlockEntries[fileHash.BlockIndex]

		file = newFile(filename, fileHash, fileBlock)
		mpq.files[filename] = file
	}

	// Seek to the start of the member's data within the archive.
	_, err = mpq.reader.Seek(int64(mpq.ArchiveOffset+
		file.block.FilePosition), 0)
	if err != nil {
		return
	}
	// The member's first byte encodes its compression type.
	compType := make([]byte, 1)
	_, err = mpq.reader.Read(compType)
	if err != nil {
		return
	}

	file.compressionType = compType[0]
	switch file.compressionType {
	case CompressBzip2:
		mpq.fileReader = bzip2.NewReader(mpq.reader)
		break
	case CompressNone:
		// Explicitly found that this doesn't have
		// compression, so don't go back to the
		// original file position as in the default.
		mpq.fileReader = mpq.reader
		break
	default:
		// Don't know this compression type, so just
		// reset back one byte and read from there as
		// if this file doesn't have a header byte.  This
		// can happen if the file flags say the file is
		// compressed but the header byte doesn't exist.
		_, err = mpq.reader.Seek(int64(mpq.ArchiveOffset+
			file.block.FilePosition), 0)
		if err != nil {
			return
		}
		mpq.fileReader = mpq.reader
		break
	}

	mpq.file = file
	// Reset the number of bytes read from the file
	mpq.fileBytesRead = 0

	return
}

作者:hsavit    项目:go-probabl   
// readFile streams the bzip2-compressed file at fn into ch line by line,
// with the trailing '\n' stripped, logging progress every 100000 lines.
// Open failures go through maybeFatal; read errors abort via log.Fatalf.
func readFile(fn string, ch chan<- string) {
	f, err := os.Open(fn)
	maybeFatal(err)
	defer f.Close()

	br := bufio.NewReader(bzip2.NewReader(f))

	for i := 0; ; i++ {
		b, err := br.ReadBytes('\n')
		switch err {
		case io.EOF:
			// ReadBytes returns any partial final line alongside EOF; the
			// original silently discarded it.
			if len(b) > 0 {
				ch <- string(b)
			}
			log.Printf("Processed %s lines total",
				humanize.Comma(int64(i)))
			return
		case nil:
			ch <- string(b[:len(b)-1])
		default:
			log.Fatalf("Error reading input: %v", err)
		}

		if i%100000 == 0 {
			log.Printf("Processed %s lines",
				humanize.Comma(int64(i)))
		}
	}
}


问题


面经


文章

微信
公众号

扫码关注公众号