Best Syzkaller code snippet using asset.xzCompressor
storage.go
Source:storage.go
...55func (storage *Storage) AssetTypeEnabled(assetType dashapi.AssetType) bool {56 return storage.cfg.IsEnabled(assetType)57}58func (storage *Storage) getDefaultCompressor() Compressor {59 return xzCompressor60}61type ExtraUploadArg struct {62 // It is assumed that paths constructed with same UniqueTag values63 // always correspond to an asset having the same content.64 UniqueTag string65 // If the asset being uploaded already exists (see above), don't return66 // an error, abort uploading and return the download URL.67 SkipIfExists bool68}69var ErrAssetTypeDisabled = errors.New("uploading assets of this type is disabled")70func (storage *Storage) assetPath(name string, extra *ExtraUploadArg) string {71 folderName := ""72 if extra != nil && extra.UniqueTag != "" {73 folderName = extra.UniqueTag74 } else {75 // The idea is to make a file name useful and yet unique.76 // So we put a file to a pseudo-unique "folder".77 folderNameBytes := sha256.Sum256([]byte(fmt.Sprintf("%v", time.Now().UnixNano())))78 folderName = fmt.Sprintf("%x", folderNameBytes)79 }80 const folderPrefix = 1281 if len(folderName) > folderPrefix {82 folderName = folderName[0:folderPrefix]83 }84 return fmt.Sprintf("%s/%s", folderName, name)85}86func (storage *Storage) uploadFileStream(reader io.Reader, assetType dashapi.AssetType,87 name string, extra *ExtraUploadArg) (string, error) {88 if name == "" {89 return "", fmt.Errorf("file name is not specified")90 }91 typeDescr := GetTypeDescription(assetType)92 if typeDescr == nil {93 return "", fmt.Errorf("asset type %s is unknown", assetType)94 }95 if !storage.AssetTypeEnabled(assetType) {96 return "", fmt.Errorf("not allowed to upload an asset of type %s: %w",97 assetType, ErrAssetTypeDisabled)98 }99 path := storage.assetPath(name, extra)100 req := &uploadRequest{101 savePath: path,102 contentType: typeDescr.ContentType,103 contentEncoding: typeDescr.ContentEncoding,104 preserveExtension: typeDescr.preserveExtension,105 }106 if req.contentType == "" 
{107 req.contentType = "application/octet-stream"108 }109 compressor := storage.getDefaultCompressor()110 if typeDescr.customCompressor != nil {111 compressor = typeDescr.customCompressor112 }113 res, err := compressor(req, storage.backend.upload)114 if existsErr, ok := err.(*FileExistsError); ok {115 storage.tracer.Log("asset %s already exists", path)116 if extra == nil || !extra.SkipIfExists {117 return "", err118 }119 // Let's just return the download URL.120 return storage.backend.downloadURL(existsErr.Path, storage.cfg.PublicAccess)121 } else if err != nil {122 return "", fmt.Errorf("failed to query writer: %w", err)123 } else {124 written, err := io.Copy(res.writer, reader)125 if err != nil {126 more := ""127 closeErr := res.writer.Close()128 if exiterr, ok := closeErr.(*exec.ExitError); ok {129 more = fmt.Sprintf(", process state '%s'", exiterr.ProcessState)130 }131 return "", fmt.Errorf("failed to redirect byte stream: copied %d bytes, error %w%s",132 written, err, more)133 }134 err = res.writer.Close()135 if err != nil {136 return "", fmt.Errorf("failed to close writer: %w", err)137 }138 }139 return storage.backend.downloadURL(res.path, storage.cfg.PublicAccess)140}141func (storage *Storage) UploadBuildAsset(reader io.Reader, fileName string, assetType dashapi.AssetType,142 build *dashapi.Build, extra *ExtraUploadArg) (dashapi.NewAsset, error) {143 const commitPrefix = 8144 commit := build.KernelCommit145 if len(commit) > commitPrefix {146 commit = commit[:commitPrefix]147 }148 baseName := filepath.Base(fileName)149 fileExt := filepath.Ext(baseName)150 name := fmt.Sprintf("%s-%s%s",151 strings.TrimSuffix(baseName, fileExt),152 commit,153 fileExt)154 url, err := storage.uploadFileStream(reader, assetType, name, extra)155 if err != nil {156 return dashapi.NewAsset{}, err157 }158 return dashapi.NewAsset{159 Type: assetType,160 DownloadURL: url,161 }, nil162}163func (storage *Storage) ReportBuildAssets(build *dashapi.Build, assets ...dashapi.NewAsset) error 
{164 // If the server denies the reques, we'll delete the orphaned file during deprecated files165 // deletion later.166 return storage.dash.AddBuildAssets(&dashapi.AddBuildAssetsReq{167 BuildID: build.ID,168 Assets: assets,169 })170}171var ErrAssetDoesNotExist = errors.New("the asset did not exist")172type FileExistsError struct {173 // The path gets changed by wrappers, so we need to return it back.174 Path string175}176func (e *FileExistsError) Error() string {177 return fmt.Sprintf("asset exists: %s", e.Path)178}179const deletionEmbargo = time.Hour * 24 * 7180// Best way: convert download URLs to paths.181// We don't want to risk killing all assets after a slight domain change.182func (storage *Storage) DeprecateAssets() error {183 resp, err := storage.dash.NeededAssetsList()184 if err != nil {185 return fmt.Errorf("failed to query needed assets: %w", err)186 }187 needed := map[string]bool{}188 for _, url := range resp.DownloadURLs {189 path, err := storage.backend.getPath(url)190 if err != nil {191 // If we failed to parse just one URL, let's stop the entire process.192 // Otherwise we'll start deleting still needed files we couldn't recognize.193 return fmt.Errorf("failed to parse '%s': %w", url, err)194 }195 needed[path] = true196 }197 storage.tracer.Log("queried needed assets: %#v", needed)198 existing, err := storage.backend.list()199 if err != nil {200 return fmt.Errorf("failed to query object list: %w", err)201 }202 toDelete := []string{}203 intersection := 0204 for _, obj := range existing {205 keep := false206 if time.Since(obj.createdAt) < deletionEmbargo {207 // To avoid races between object upload and object deletion, we don't delete208 // newly uploaded files for a while after they're uploaded.209 keep = true210 }211 if val, ok := needed[obj.path]; ok && val {212 keep = true213 intersection++214 }215 storage.tracer.Log("-- object %v, %v: keep %t", obj.path, obj.createdAt, keep)216 if !keep {217 toDelete = append(toDelete, obj.path)218 }219 }220 
const intersectionCheckCutOff = 4221 if len(existing) > intersectionCheckCutOff && intersection == 0 {222 // This is a last-resort protection against possible dashboard bugs.223 // If the needed assets have no intersection with the existing assets,224 // don't delete anything. Otherwise, if it was a bug, we will lose all files.225 return fmt.Errorf("needed assets have almost no intersection with the existing ones")226 }227 for _, path := range toDelete {228 err := storage.backend.remove(path)229 storage.tracer.Log("-- deleted %v: %v", path, err)230 // Several syz-ci's might be sharing the same storage. So let's tolerate231 // races during file deletion.232 if err != nil && err != ErrAssetDoesNotExist {233 return fmt.Errorf("asset deletion failure: %w", err)234 }235 }236 return nil237}238type uploadRequest struct {239 savePath string240 contentEncoding string241 contentType string242 preserveExtension bool243}244type uploadResponse struct {245 path string246 writer io.WriteCloser247}248type storedObject struct {249 path string250 createdAt time.Time251}252type StorageBackend interface {253 upload(req *uploadRequest) (*uploadResponse, error)254 list() ([]storedObject, error)255 remove(path string) error256 downloadURL(path string, publicURL bool) (string, error)257 getPath(url string) (string, error)258}259type Compressor func(req *uploadRequest,260 next func(req *uploadRequest) (*uploadResponse, error)) (*uploadResponse, error)261func xzCompressor(req *uploadRequest,262 next func(req *uploadRequest) (*uploadResponse, error)) (*uploadResponse, error) {263 newReq := *req264 if !req.preserveExtension {265 newReq.savePath = fmt.Sprintf("%s.xz", newReq.savePath)266 }267 resp, err := next(&newReq)268 if err != nil {269 return nil, err270 }271 xzWriter, err := xz.NewWriter(resp.writer)272 if err != nil {273 return nil, fmt.Errorf("failed to create xz writer: %w", err)274 }275 return &uploadResponse{...
xzCompressor
Using AI Code Generation
// NOTE(review): scraped SEO filler, not valid Go — the same `func main` is
// redefined 14 times and the site's line numbers are fused into the tokens.
// Kept byte-identical; this snippet does not compile and has no behavior.
1func main() {2 asset := asset{}3 asset.xzCompressor()4}5func main() {6 asset := asset{}7 asset.xzCompressor()8}9func main() {10 asset := asset{}11 asset.xzCompressor()12}13func main() {14 asset := asset{}15 asset.xzCompressor()16}17func main() {18 asset := asset{}19 asset.xzCompressor()20}21func main() {22 asset := asset{}23 asset.xzCompressor()24}25func main() {26 asset := asset{}27 asset.xzCompressor()28}29func main() {30 asset := asset{}31 asset.xzCompressor()32}33func main() {34 asset := asset{}35 asset.xzCompressor()36}37func main() {38 asset := asset{}39 asset.xzCompressor()40}41func main() {42 asset := asset{}43 asset.xzCompressor()44}45func main() {46 asset := asset{}47 asset.xzCompressor()48}49func main() {50 asset := asset{}51 asset.xzCompressor()52}53func main() {54 asset := asset{}55 asset.xzCompressor()56}
xzCompressor
Using AI Code Generation
// NOTE(review): scraped SEO filler, not valid Go — repeated `func main`
// definitions referencing undeclared compressor types, truncated mid-token
// ("flate"), with the site's line numbers fused in. Kept byte-identical.
1func main() {2 asset := asset.NewAsset("test.txt", "test")3 asset.Compress(xzCompressor{})4}5func main() {6 asset := asset.NewAsset("test.txt", "test")7 asset.Compress(gzipCompressor{})8}9func main() {10 asset := asset.NewAsset("test.txt", "test")11 asset.Compress(bzip2Compressor{})12}13func main() {14 asset := asset.NewAsset("test.txt", "test")15 asset.Compress(deflateCompressor{})16}17func main() {18 asset := asset.NewAsset("test.txt", "test")19 asset.Compress(snappyCompressor{})20}21func main() {22 asset := asset.NewAsset("test.txt", "test")23 asset.Compress(lz4Compressor{})24}25func main() {26 asset := asset.NewAsset("test.txt", "test")27 asset.Compress(lzmaCompressor{})28}29func main() {30 asset := asset.NewAsset("test.txt", "test")31 asset.Compress(lz4hcCompressor{})32}33func main() {34 asset := asset.NewAsset("test.txt", "test")35 asset.Compress(zstdCompressor{})36}37func main() {38 asset := asset.NewAsset("test.txt", "test")39 asset.Compress(flateCompressor{})40}41func main() {42 asset := asset.NewAsset("test.txt", "test")43 asset.Compress(flate
xzCompressor
Using AI Code Generation
// NOTE(review): scraped SEO filler, not valid Go — unclosed `import (` groups,
// repeated `func main`, truncated ending, site line numbers fused into the
// tokens. Kept byte-identical; this snippet does not compile.
1import (2func main() {3 a, err := asset.New("test.txt", asset.XzCompressor)4 if err != nil {5 fmt.Println(err)6 }7 err = a.Compress()8 if err != nil {9 fmt.Println(err)10 }11}12import (13func main() {14 a, err := asset.New("test.txt", asset.XzCompressor)15 if err != nil {16 fmt.Println(err)17 }18 err = a.Decompress()19 if err != nil {20 fmt.Println(err)21 }22}23import (24func main() {25 a, err := asset.New("test.txt", asset.XzCompressor)26 if err != nil {27 fmt.Println(err)28 }29 err = a.Compress()30 if err != nil {31 fmt.Println(err)32 }33 err = a.Decompress()34 if err != nil {35 fmt.Println(err)36 }37}38import (39func main() {40 a, err := asset.New("test.txt", asset.XzCompressor)41 if err != nil {42 fmt.Println(err)43 }44 err = a.Compress()45 if err != nil {46 fmt.Println(err)47 }
xzCompressor
Using AI Code Generation
// NOTE(review): scraped SEO filler, not valid Go — repeated `func main`
// redefinitions, truncated mid-snippet, site line numbers fused in.
// Kept byte-identical; this snippet does not compile and has no behavior.
1func main() {2 asset := asset{}3 asset.xzCompressor("input.txt")4}5func main() {6 asset := asset{}7 asset.xzCompressor("input.txt")8}9func main() {10 asset := asset{}11 asset.xzCompressor("input.txt")12}13func main() {14 asset := asset{}15 asset.xzCompressor("input.txt")16}17func main() {18 asset := asset{}19 asset.xzCompressor("input.txt")20}21func main() {22 asset := asset{}23 asset.xzCompressor("input.txt")24}25func main() {26 asset := asset{}27 asset.xzCompressor("input.txt")28}29func main() {30 asset := asset{}
xzCompressor
Using AI Code Generation
// NOTE(review): scraped SEO filler unrelated to xzCompressor (an svgo drawing
// snippet) — unclosed `import (` and `var (` groups, undeclared width/height,
// site line numbers fused in. Kept byte-identical; does not compile.
1import (2func main() {3 var (4 canvas = svg.New(os.Stdout)5 canvas.Start(width, height)6 canvas.Rect(0, 0, width, height, "fill:rgb(0,0,255)")7 canvas.Text(10, 20, "Hello, World!", "fill:rgb(255,255,255)")8 canvas.End()9}
Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 minutes of automation test minutes FREE!!