Best Syzkaller code snippet using gcs.Publish
publish.go
Source:publish.go
...22 "sigs.k8s.io/release-sdk/object"23 "sigs.k8s.io/release-utils/http"24 "sigs.k8s.io/release-utils/util"25)26// Publisher is the structure for publishing anything release related27type Publisher struct {28 client publisherClient29}30// NewPublisher creates a new Publisher instance31func NewPublisher() *Publisher {32 objStore := *object.NewGCS()33 objStore.SetOptions(34 objStore.WithNoClobber(false),35 )36 return &Publisher{37 client: &defaultPublisher{&objStore},38 }39}40// SetClient can be used to set the internal publisher client41func (p *Publisher) SetClient(client publisherClient) {42 p.client = client43}44// publisherClient is a client for working with GCS45//46//counterfeiter:generate . publisherClient47type publisherClient interface {48 GSUtil(args ...string) error49 GSUtilOutput(args ...string) (string, error)50 GSUtilStatus(args ...string) (bool, error)51 GetURLResponse(url string) (string, error)52 GetReleasePath(bucket, gcsRoot, version string, fast bool) (string, error)53 GetMarkerPath(bucket, gcsRoot string) (string, error)54 NormalizePath(pathParts ...string) (string, error)55 TempDir(dir, pattern string) (name string, err error)56 CopyToLocal(remote, local string) error57 ReadFile(filename string) ([]byte, error)58 Unmarshal(data []byte, v interface{}) error59 Marshal(v interface{}) ([]byte, error)60 TempFile(dir, pattern string) (f *os.File, err error)61 CopyToRemote(local, remote string) error62}63type defaultPublisher struct {64 objStore object.Store65}66func (*defaultPublisher) GSUtil(args ...string) error {67 return gcli.GSUtil(args...)68}69func (*defaultPublisher) GSUtilOutput(args ...string) (string, error) {70 return gcli.GSUtilOutput(args...)71}72func (*defaultPublisher) GSUtilStatus(args ...string) (bool, error) {73 status, err := gcli.GSUtilStatus(args...)74 if err != nil {75 return false, err76 }77 return status.Success(), nil78}79func (*defaultPublisher) GetURLResponse(url string) (string, error) {80 return http.GetURLResponse(url, 
true)81}82func (d *defaultPublisher) GetReleasePath(83 bucket, gcsRoot, version string, fast bool,84) (string, error) {85 return d.objStore.GetReleasePath(bucket, gcsRoot, version, fast)86}87func (d *defaultPublisher) GetMarkerPath(88 bucket, gcsRoot string,89) (string, error) {90 return d.objStore.GetMarkerPath(bucket, gcsRoot)91}92func (d *defaultPublisher) NormalizePath(pathParts ...string) (string, error) {93 return d.objStore.NormalizePath(pathParts...)94}95func (*defaultPublisher) TempDir(dir, pattern string) (name string, err error) {96 return os.MkdirTemp(dir, pattern)97}98func (d *defaultPublisher) CopyToLocal(remote, local string) error {99 return d.objStore.CopyToLocal(remote, local)100}101func (*defaultPublisher) ReadFile(filename string) ([]byte, error) {102 return os.ReadFile(filename)103}104func (*defaultPublisher) Unmarshal(data []byte, v interface{}) error {105 return json.Unmarshal(data, v)106}107func (*defaultPublisher) Marshal(v interface{}) ([]byte, error) {108 return json.Marshal(v)109}110func (*defaultPublisher) TempFile(dir, pattern string) (f *os.File, err error) {111 return os.CreateTemp(dir, pattern)112}113func (d *defaultPublisher) CopyToRemote(local, remote string) error {114 return d.objStore.CopyToRemote(local, remote)115}116// Publish a new version, (latest or stable) but only if the files actually117// exist on GCS and the artifacts we're dealing with are newer than the118// contents in GCS.119// buildType - One of 'release' or 'ci'120// version - The version121// buildDir - build output directory122// bucket - GCS bucket123// gcsRoot - The top-level GCS directory builds will be released to124//125// Expected destination format:126//127// gs://<bucket>/<gcsRoot>[/fast]/<version>128func (p *Publisher) PublishVersion(129 buildType, version, buildDir, bucket, gcsRoot string,130 extraVersionMarkers []string,131 privateBucket, fast bool,132) error {133 logrus.Info("Publishing version")134 releaseType := "latest"135 if buildType == 
"release" {136 // For release/ targets, type should be 'stable'137 if !(strings.Contains(version, ReleaseTypeAlpha) ||138 strings.Contains(version, ReleaseTypeBeta) ||139 strings.Contains(version, ReleaseTypeRC)) {140 releaseType = "stable"141 }142 }143 sv, err := util.TagStringToSemver(version)144 if err != nil {145 return fmt.Errorf("invalid version %s", version)146 }147 markerPath, markerPathErr := p.client.GetMarkerPath(148 bucket,149 gcsRoot,150 )151 if markerPathErr != nil {152 return fmt.Errorf("get version marker path: %w", markerPathErr)153 }154 releasePath, releasePathErr := p.client.GetReleasePath(155 bucket,156 gcsRoot,157 version,158 fast,159 )160 if releasePathErr != nil {161 return fmt.Errorf("get release path: %w", releasePathErr)162 }163 // TODO: This should probably be a more thorough check of explicit files164 // TODO: This should explicitly do a `gsutil ls` via gcs.PathExists165 if err := p.client.GSUtil("ls", releasePath); err != nil {166 return fmt.Errorf("release files don't exist at %s: %w", releasePath, err)167 }168 var versionMarkers []string169 if fast {170 versionMarkers = append(171 versionMarkers,172 releaseType+"-fast",173 )174 } else {175 versionMarkers = append(176 versionMarkers,177 releaseType,178 fmt.Sprintf("%s-%d", releaseType, sv.Major),179 fmt.Sprintf("%s-%d.%d", releaseType, sv.Major, sv.Minor),180 )181 }182 if len(extraVersionMarkers) > 0 {183 versionMarkers = append(versionMarkers, extraVersionMarkers...)184 }185 logrus.Infof("Publish version markers: %v", versionMarkers)186 logrus.Infof("Publish official pointer text files to %s", markerPath)187 for _, file := range versionMarkers {188 versionMarker := file + ".txt"189 needsUpdate, err := p.VerifyLatestUpdate(190 versionMarker, markerPath, version,191 )192 if err != nil {193 return fmt.Errorf("verify latest update for %s: %w", versionMarker, err)194 }195 // If there's a version that's above the one we're trying to release,196 // don't do anything, and just try the next 
one.197 if !needsUpdate {198 logrus.Infof(199 "Skipping %s for %s because it does not need to be updated",200 versionMarker, version,201 )202 continue203 }204 if err := p.PublishToGcs(205 versionMarker, buildDir, markerPath, version, privateBucket,206 ); err != nil {207 return fmt.Errorf("publish release to GCS: %w", err)208 }209 }210 return nil211}212// VerifyLatestUpdate checks if the new version is greater than the version213// currently published on GCS. It returns `true` for `needsUpdate` if the remote214// version does not exist or needs to be updated.215// publishFile - the version marker to look for216// markerPath - the GCS path to search for the version marker in217// version - release version218func (p *Publisher) VerifyLatestUpdate(219 publishFile, markerPath, version string,220) (needsUpdate bool, err error) {221 logrus.Infof("Testing %s > %s (published)", version, publishFile)222 publishFileDst, publishFileDstErr := p.client.NormalizePath(markerPath, publishFile)223 if publishFileDstErr != nil {224 return false, fmt.Errorf("get marker file destination: %w", publishFileDstErr)225 }226 // TODO: Should we add a object.`GCS` method for `gsutil cat`?227 gcsVersion, err := p.client.GSUtilOutput("cat", publishFileDst)228 if err != nil {229 logrus.Infof("%s does not exist but will be created", publishFileDst)230 return true, nil231 }232 sv, err := util.TagStringToSemver(version)233 if err != nil {234 return false, fmt.Errorf("invalid version format %s", version)235 }236 gcsSemverVersion, err := util.TagStringToSemver(gcsVersion)237 if err != nil {238 return false, fmt.Errorf("invalid GCS version format %s", gcsVersion)239 }240 if sv.LTE(gcsSemverVersion) {241 logrus.Infof(242 "Not updating version, because %s <= %s", version, gcsVersion,243 )244 return false, nil245 }246 logrus.Infof("Updating version, because %s > %s", version, gcsVersion)247 return true, nil248}249// PublishToGcs publishes a release to GCS250// publishFile - the GCS location to look in251// 
buildDir - build output directory252// markerPath - the GCS path to publish a version marker to253// version - release version254func (p *Publisher) PublishToGcs(255 publishFile, buildDir, markerPath, version string,256 privateBucket bool,257) error {258 releaseStage := filepath.Join(buildDir, ReleaseStagePath)259 publishFileDst, publishFileDstErr := p.client.NormalizePath(markerPath, publishFile)260 if publishFileDstErr != nil {261 return fmt.Errorf("get marker file destination: %w", publishFileDstErr)262 }263 publicLink := fmt.Sprintf("%s/%s", URLPrefixForBucket(markerPath), publishFile)264 if strings.HasPrefix(markerPath, ProductionBucket) {265 publicLink = fmt.Sprintf("%s/%s", ProductionBucketURL, publishFile)266 }267 uploadDir := filepath.Join(releaseStage, "upload")268 if err := os.MkdirAll(uploadDir, os.FileMode(0o755)); err != nil {269 return fmt.Errorf("create upload dir %s: %w", uploadDir, err)270 }271 latestFile := filepath.Join(uploadDir, "latest")272 if err := os.WriteFile(273 latestFile, []byte(version), os.FileMode(0o644),274 ); err != nil {275 return fmt.Errorf("write latest version file: %w", err)276 }277 if err := p.client.GSUtil(278 "-m",279 "-h", "Content-Type:text/plain",280 "-h", "Cache-Control:private, max-age=0, no-transform",281 "cp",282 latestFile,283 publishFileDst,284 ); err != nil {285 return fmt.Errorf("copy %s to %s: %w", latestFile, publishFileDst, err)286 }287 var content string288 if !privateBucket {289 // New Kubernetes infra buckets, like k8s-staging-kubernetes, have a290 // bucket-only ACL policy set, which means attempting to set the ACL on291 // an object will fail. 
We should skip this ACL change in those292 // instances, as new buckets already default to being publicly293 // readable.294 //295 // Ref:296 // - https://cloud.google.com/storage/docs/bucket-policy-only297 // - https://github.com/kubernetes/release/issues/904298 if !strings.HasPrefix(markerPath, object.GcsPrefix+"k8s-") {299 aclOutput, err := p.client.GSUtilOutput(300 "acl", "ch", "-R", "-g", "all:R", publishFileDst,301 )302 if err != nil {303 return fmt.Errorf("change %s permissions: %w", publishFileDst, err)304 }305 logrus.Infof("Making uploaded version file public: %s", aclOutput)306 }307 // If public, validate public link308 response, err := p.client.GetURLResponse(publicLink)309 if err != nil {310 return fmt.Errorf("get content of %s: %w", publicLink, err)311 }312 content = response313 } else {314 response, err := p.client.GSUtilOutput("cat", publicLink)315 if err != nil {316 return fmt.Errorf("get content of %s: %w", publicLink, err)317 }318 content = response319 }320 logrus.Infof("Validating uploaded version file at %s", publicLink)321 if version != content {322 return fmt.Errorf(323 "version %s it not equal response %s",324 version, content,325 )326 }327 logrus.Info("Version equals response")328 return nil329}330// PublishReleaseNotesIndex updates or creates the release notes index JSON at331// the target `gcsIndexRootPath`.332func (p *Publisher) PublishReleaseNotesIndex(333 gcsIndexRootPath, gcsReleaseNotesPath, version string,334) error {335 const releaseNotesIndex = "/release-notes-index.json"336 indexFilePath, err := p.client.NormalizePath(337 gcsIndexRootPath, releaseNotesIndex,338 )339 if err != nil {340 return fmt.Errorf("normalize index file: %w", err)341 }342 logrus.Infof("Publishing release notes index %s", indexFilePath)343 releaseNotesFilePath, err := p.client.NormalizePath(gcsReleaseNotesPath)344 if err != nil {345 return fmt.Errorf("normalize release notes file: %w", err)346 }347 success, err := p.client.GSUtilStatus("-q", "stat", 
indexFilePath)348 if err != nil {349 return fmt.Errorf("run gcsutil stat: %w", err)350 }351 logrus.Info("Building release notes index")352 versions := make(map[string]string)353 if success {354 logrus.Info("Modifying existing release notes index file")355 tempDir, err := p.client.TempDir("", "release-notes-index-")356 if err != nil {...
bigquery.go
Source:bigquery.go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"time"

	"cloud.google.com/go/bigquery" // different API; see ffs below.
	// "google.golang.org/appengine/taskqueue"

	"github.com/skypies/util/date"
	"github.com/skypies/util/gcp/gcs"
	"github.com/skypies/util/gcp/tasks"
	"github.com/skypies/util/widget"

	"github.com/skypies/flightdb/fgae"
)

var (
	// The bigquery dataset (dest) is an entirely different google cloud project.
	// This, the 'src' project, needs its service worker account to be
	// added as an 'editor' to the 'dest' project, so that we can submit
	// bigquery load requests.
	// Similarly, the service worker from the 'dest' project needs to be added to
	// the 'source' project, so that dest can read the GCS folders. I think.

	// This is in the 'src' project
	folderGCS = "bigquery-flights"

	// This is the 'dest' project
	bigqueryProject   = "serfr0-1000"
	bigqueryDataset   = "public"
	bigqueryTableName = "flights"
)

// {{{ publishAllFlightsHandler

// publishAllFlightsHandler fans out one /batch/publish-flights task per day
// in the requested date range.
//
// http://backend-dot-serfr0-fdb.appspot.com/batch/publish-all-flights?skipload=1&date=range&range_from=2016/07/01&range_to=2016/07/03
// /batch/publish-all-flights?date=range&range_from=2015/08/09&range_to=2015/08/10
//  ?skipload=1  (skip loading them into bigquery; it's better to bulk load all of them at once)
// Writes them all into a batch queue
func publishAllFlightsHandler(db fgae.FlightDB, w http.ResponseWriter, r *http.Request) {
	ctx := db.Ctx()
	str := ""
	s, e, _ := widget.FormValueDateRange(r)
	days := date.IntermediateMidnights(s.Add(-1*time.Second), e) // decrement start, to include it
	taskUrl := "/batch/publish-flights"

	taskClient, err := tasks.GetClient(ctx)
	if err != nil {
		db.Errorf(" publishAllFlightsHandler: GetClient: %v", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	for _, day := range days {
		dayStr := day.Format("2006.01.02")

		thisUrl := fmt.Sprintf("%s?datestring=%s", taskUrl, dayStr)
		// Propagate ?skipload through to each per-day task.
		if r.FormValue("skipload") != "" {
			thisUrl += "&skipload=" + r.FormValue("skipload")
		}

		params := url.Values{}

		// NOTE(review): ProjectID, LocationID and QueueName are defined
		// elsewhere in this package — not visible here.
		if _, err := tasks.SubmitAETask(ctx, taskClient, ProjectID, LocationID, QueueName, 0, thisUrl, params); err != nil {
			db.Errorf(" publishAllFlightsHandler: enqueue: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		/*
			t := taskqueue.NewPOSTTask(thisUrl, map[string][]string{})
			if _,err := taskqueue.Add(ctx, t, "bigbatch"); err != nil {
				db.Errorf("publishAllFlightsHandler: enqueue: %v", err)
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
		*/

		str += " * posting for " + thisUrl + "\n"
	}

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK, enqueued %d\n--\n%s", len(days), str)))
}

// }}}
// {{{ publishFlightsHandler

// publishFlightsHandler writes one day's flights to a GCS JSON file and
// (unless ?skipload is set) submits a BigQuery load job for that file.
//
// http://backend-dot-serfr0-fdb.appspot.com/backend/publish-flights?datestring=yesterday
// http://backend-dot-serfr0-fdb.appspot.com/backend/publish-flights?datestring=2015.09.15
// As well as writing the data into a file in Cloud Storage, it will submit a load
// request into BigQuery to load that file.
func publishFlightsHandler(db fgae.FlightDB, w http.ResponseWriter, r *http.Request) {
	tStart := time.Now()
	datestring := r.FormValue("datestring")
	if datestring == "yesterday" {
		datestring = date.NowInPdt().AddDate(0, 0, -1).Format("2006.01.02")
	}
	filename := "flights-" + datestring + ".json"
	db.Infof("Starting /batch/publish-flights: %s", filename)

	n, err := writeBigQueryFlightsGCSFile(db, r, datestring, folderGCS, filename)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if r.FormValue("skipload") == "" {
		if err := submitLoadJob(db, r, folderGCS, filename); err != nil {
			http.Error(w, "submitLoadJob failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
	}

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK!\n%d flights written to gs://%s/%s and job sent - took %s\n",
		n, folderGCS, filename, time.Since(tStart))))
}

// }}}
// {{{ writeBigQueryFlightsGCSFile

// Returns number of records written (which is zero if the file already exists)
func writeBigQueryFlightsGCSFile(db fgae.FlightDB, r *http.Request, datestring, foldername, filename string) (int, error) {
	ctx := db.Ctx()

	// Idempotence: if the file is already there, do nothing and report 0.
	if exists, err := gcs.Exists(ctx, foldername, filename); err != nil {
		return 0, err
	} else if exists {
		return 0, nil
	}
	gcsHandle, err := gcs.OpenRW(ctx, foldername, filename, "application/json")
	if err != nil {
		return 0, err
	}
	encoder := json.NewEncoder(gcsHandle.IOWriter())

	tags := widget.FormValueCommaSpaceSepStrings(r, "tags")
	s := date.Datestring2MidnightPdt(datestring)
	e := s.AddDate(0, 0, 1).Add(-1 * time.Second) // +23:59:59 (or 22:59 or 24:59 when going in/out DST)

	n := 0
	q := fgae.QueryForTimeRange(tags, s, e)
	it := db.NewIterator(q)
	for it.Iterate(ctx) {
		f := it.Flight()

		// A flight that straddles midnight will have timeslots either
		// side, and so will end up showing in the results for two
		// different days. We don't want dupes in the aggregate output, so
		// we should only include the flight in one of them; we pick the
		// first day. So if the flight's first timeslot does not start
		// after our start-time, skip it.
		if slots := f.Timeslots(); len(slots) > 0 && slots[0].Before(s) {
			continue
		}

		n++
		fbq := f.ForBigQuery()
		if err := encoder.Encode(fbq); err != nil {
			return 0, err
		}
	}
	if it.Err() != nil {
		return 0, fmt.Errorf("iterator [%s,%s] failed at %s: %v", s, e, time.Now(), it.Err())
	}

	// Close flushes the GCS object; an error here means the write failed.
	if err := gcsHandle.Close(); err != nil {
		return 0, err
	}

	db.Infof("GCS bigquery file '%s' successfully written", filename)
	return n, nil
}

// }}}
// {{{ submitLoadJob

// submitLoadJob asks BigQuery to load the named GCS file (JSON, jagged rows
// allowed) into the pre-existing flights table, then polls the job status once.
//
// https://cloud.google.com/bigquery/docs/loading-data-cloud-storage#bigquery-import-gcs-file-go
func submitLoadJob(db fgae.FlightDB, r *http.Request, gcsfolder, gcsfile string) error {
	ctx := db.Ctx()

	client, err := bigquery.NewClient(ctx, bigqueryProject)
	if err != nil {
		return fmt.Errorf("Creating bigquery client: %v", err)
	}

	myDataset := client.Dataset(bigqueryDataset)
	destTable := myDataset.Table(bigqueryTableName)

	gcsSrc := bigquery.NewGCSReference(fmt.Sprintf("gs://%s/%s", gcsfolder, gcsfile))
	gcsSrc.SourceFormat = bigquery.JSON
	gcsSrc.AllowJaggedRows = true

	// CreateNever: the destination table must already exist.
	loader := destTable.LoaderFrom(gcsSrc)
	loader.CreateDisposition = bigquery.CreateNever
	job, err := loader.Run(ctx)
	if err != nil {
		return fmt.Errorf("Submission of load job: %v", err)
	}

	// https://godoc.org/cloud.google.com/go/bigquery#Copier
	/*
		tableDest := &bigquery.Table{
			ProjectID: bigqueryProject,
			DatasetID: bigqueryDataset,
			TableID:   bigqueryTableName,
		}
		copier := myDataset.Table(bigqueryTableName).CopierFrom(gcsSrc)
		copier.WriteDisposition = bigquery.WriteAppend
		job,err := copier.Run(ctx)
		if err != nil {
			return fmt.Errorf("Submission of load job: %v", err)
		}
	*/
	//job,err := client.Copy(ctx, tableDest, gcsSrc, bigquery.WriteAppend)
	//if err != nil {
	//	return fmt.Errorf("Submission of load job: %v", err)
	//}

	// NOTE(review): fixed 5s sleep before a single status check — the job may
	// still be running at that point, so a slow load could be reported as OK
	// before completion. Confirm whether a wait/poll loop is wanted here.
	time.Sleep(5 * time.Second)

	if status, err := job.Status(ctx); err != nil {
		return fmt.Errorf("Failure determining status: %v", err)
	} else if err := status.Err(); err != nil {
		detailedErrStr := ""
		for i, innerErr := range status.Errors {
			detailedErrStr += fmt.Sprintf(" [%2d] %v\n", i, innerErr)
		}
		db.Errorf("BiqQuery LoadJob error: %v\n--\n%s", err, detailedErrStr)
		return fmt.Errorf("Job error: %v\n--\n%s", err, detailedErrStr)
	} else {
		db.Infof("BiqQuery LoadJob status: done=%v, state=%s, %s",
			status.Done(), status.State, status)
	}

	return nil
}

// }}}

// {{{ -------------------------={ E N D }=----------------------------------

// Local variables:
// folded-file: t
// end:

// }}}
release_test.go
Source:release_test.go
package unit_test

import (
	"fmt"
	"os"
	"testing"

	"github.com/stretchr/testify/require"
)

// NOTE(review): the table-test helpers used below (testCase, lines, aborted,
// empty, randString, shellScript, newShellScript, fakeProwJob, loadFile) are
// defined elsewhere in this package — not visible here.

// TestReleaseHelperFunctions checks the version-string helper functions of
// the release shell script against fixed inputs.
func TestReleaseHelperFunctions(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `major_minor_version "v0.2.1"`,
		stdout: lines("0.2"),
	}, {
		name:   `major_minor_version "0.2.1"`,
		stdout: lines("0.2"),
	}, {
		name:   `patch_version "v0.2.1"`,
		stdout: lines("1"),
	}, {
		name:   `patch_version "0.2.1"`,
		stdout: lines("1"),
	}, {
		name:   `hash_from_tag "v20010101-deadbeef"`,
		stdout: lines("deadbeef"),
	}}
	for _, tc := range tcs {
		tc := tc // capture range variable for parallel subtests
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingVersion checks validation of the --version flag.
func TestReleaseFlagParsingVersion(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `parse_flags --version`,
		stderr: aborted("missing parameter after --version"),
	}, {
		name:   `parse_flags --version a`,
		stderr: aborted("version format must be '[0-9].[0-9].[0-9]'"),
	}, {
		name:   `parse_flags --version 0.0`,
		stderr: aborted("version format must be '[0-9].[0-9].[0-9]'"),
	}, {
		name:   `parse_flags --version 1.0.0`,
		stdout: empty(),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingBranch checks validation of the --branch flag.
func TestReleaseFlagParsingBranch(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `parse_flags --branch`,
		stderr: aborted("missing parameter after --branch"),
	}, {
		name:   `parse_flags --branch a`,
		stderr: aborted("branch name must be 'release-[0-9].[0-9]'"),
	}, {
		name:   `parse_flags --branch 0.0`,
		stderr: aborted("branch name must be 'release-[0-9].[0-9]'"),
	}, {
		name:   `parse_flags --branch release-0.0`,
		stdout: empty(),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingReleaseNotes checks validation of --release-notes,
// including that an existing file path is accepted.
func TestReleaseFlagParsingReleaseNotes(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tmpfile := t.TempDir() + "/release-notes.md"
	err := os.WriteFile(tmpfile,
		[]byte("# Release Notes\n\n## 1.0.0\n\n* First release\n"),
		0o600)
	require.NoError(t, err)
	tcs := []testCase{{
		name:   `parse_flags --release-notes`,
		stderr: aborted("missing parameter after --release-notes"),
	}, {
		name:   `parse_flags --release-notes a`,
		stderr: aborted("file a doesn't exist"),
	}, {
		name:     `parse_flags --release-notes release-notes.md`,
		commands: []string{fmt.Sprintf(`parse_flags --release-notes %s`, tmpfile)},
		stdout:   empty(),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingReleaseGcsGcr checks that --release-gcs/--release-gcr
// require a value, and are accepted together with --publish.
func TestReleaseFlagParsingReleaseGcsGcr(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `parse_flags --release-gcs`,
		stderr: aborted("missing parameter after --release-gcs"),
	}, {
		name:   `parse_flags --release-gcs a --publish`,
		stdout: empty(),
	}, {
		name:   `parse_flags --release-gcr`,
		stderr: aborted("missing parameter after --release-gcr"),
	}, {
		name:   `parse_flags --release-gcr a --publish`,
		stdout: empty(),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingReleaseConstraints checks mutually-exclusive flag
// combinations are rejected.
func TestReleaseFlagParsingReleaseConstraints(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `parse_flags --dot-release --auto-release`,
		stderr: aborted("cannot have both --dot-release and --auto-release set simultaneously"),
	}, {
		name:   `parse_flags --auto-release --version 1.0.0`,
		stderr: aborted("cannot have both --version and --auto-release set simultaneously"),
	}, {
		name:   `parse_flags --auto-release --branch release-0.0`,
		stderr: aborted("cannot have both --branch and --auto-release set simultaneously"),
	}, {
		name:   `parse_flags --release-gcs a --release-dir b`,
		stderr: aborted("cannot have both --release-gcs and --release-dir set simultaneously"),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingNightly checks validation of the --from-nightly tag.
func TestReleaseFlagParsingNightly(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `parse_flags --from-nightly`,
		stderr: aborted("missing parameter after --from-nightly"),
	}, {
		name:   `parse_flags --from-nightly aaa`,
		stderr: aborted("nightly tag must be 'vYYYYMMDD-commithash'"),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingGithubToken checks that --github-token reads the
// token from the given file (trailing newline stripped) into GITHUB_TOKEN.
func TestReleaseFlagParsingGithubToken(t *testing.T) {
	t.Parallel()
	tmpfile := t.TempDir() + "/github.token"
	token := randString(12)
	err := os.WriteFile(tmpfile, []byte(token+"\n"), 0o600)
	require.NoError(t, err)
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `parse_flags --github-token`,
		stderr: aborted("missing parameter after --github-token"),
	}, {
		name:   `parse_flags --github-token github.token`,
		stdout: lines(token),
		commands: []string{
			fmt.Sprintf(`parse_flags --github-token %s`, tmpfile),
			`echo $GITHUB_TOKEN`,
		},
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingGcsGcrIgnoredValues checks that GCS/GCR flags are
// ignored (with a notice) when --publish is not given.
func TestReleaseFlagParsingGcsGcrIgnoredValues(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name:   `parse_flags --release-gcs foo`,
		stdout: lines("Not publishing the release, GCS flag is ignored"),
	}, {
		name:   `parse_flags --release-gcr foo`,
		stdout: lines("Not publishing the release, GCR flag is ignored"),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// TestReleaseFlagParsingDefaults checks the default KO_DOCKER_REPO /
// RELEASE_GCS_BUCKET / RELEASE_DIR values with and without --publish.
func TestReleaseFlagParsingDefaults(t *testing.T) {
	t.Parallel()
	sc := testReleaseShellScript()
	tcs := []testCase{{
		name: `parse_flags`,
		commands: []string{
			"parse_flags",
			`echo :${KO_DOCKER_REPO}:`,
			`echo :${RELEASE_GCS_BUCKET}:`,
		},
		stdout: lines(
			":ko.local:",
			"::",
		),
	}, {
		// NOTE(review): the case name mentions "--release-dir foo" but the
		// command below does not pass it (and the expected RELEASE_DIR is
		// empty) — confirm whether the name or the command is stale.
		name: `parse_flags --publish --release-dir foo`,
		commands: []string{
			"parse_flags --publish",
			`echo :${KO_DOCKER_REPO}:`,
			`echo :${RELEASE_GCS_BUCKET}:${RELEASE_DIR}:`,
		},
		stdout: lines(
			":gcr.io/knative-nightly:",
			":knative-nightly/hack::",
		),
	}, {
		name: `parse_flags --release-gcr foo --publish`,
		commands: []string{
			"parse_flags --release-gcr foo --publish",
			`echo :${KO_DOCKER_REPO}:`,
			`echo :${RELEASE_GCS_BUCKET}:${RELEASE_DIR}:`,
		},
		stdout: lines(
			":foo:",
			":knative-nightly/hack::",
		),
	}, {
		name: `parse_flags --release-gcs foo --publish`,
		commands: []string{
			"parse_flags --release-gcs foo --publish",
			`echo :${KO_DOCKER_REPO}:`,
			`echo :${RELEASE_GCS_BUCKET}:${RELEASE_DIR}:`,
		},
		stdout: lines(
			":gcr.io/knative-nightly:",
			":foo::",
		),
	}}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, tc.test(sc))
	}
}

// testReleaseShellScript builds the shell-script fixture under test from the
// fake Prow job environment and the source-release.bash script.
func testReleaseShellScript() shellScript {
	return newShellScript(
		fakeProwJob(),
		loadFile("source-release.bash"),
	)
}
Publish
Using AI Code Generation
1import (2func main() {3 ctx := context.Background()4 client, err := storage.NewClient(ctx)5 if err != nil {6 log.Fatal(err)7 }8 defer client.Close()9 f, err := os.Open(file)10 if err != nil {11 log.Fatal(err)12 }13 defer f.Close()14 wc := client.Bucket(bucket).Object(object).NewWriter(ctx)15 if _, err = io.Copy(wc, f); err != nil {16 log.Fatal(err)17 }18 if err := wc.Close(); err != nil {19 log.Fatal(err)20 }21 fmt.Fprintf(w, "Blob %v uploaded.\n", object)22}23import (24func main() {25 ctx := context.Background()26 client, err := storage.NewClient(ctx)27 if err != nil {28 log.Fatal(err)29 }30 defer client.Close()31 rc, err := client.Bucket(bucket).Object(object).NewReader(ctx)32 if err != nil {33 log.Fatal(err)34 }35 defer rc.Close()36 f, err := os.Create(file)37 if err != nil {38 log.Fatal(err)39 }40 defer f.Close()41 if _, err := io.Copy(f, rc); err != nil {42 log.Fatal(err)43 }44 fmt.Fprintf(w, "Blob %v downloaded.\n", object)45}46import (47func main() {
Publish
Using AI Code Generation
1import (2func main() {3 ctx := context.Background()4 client, err := storage.NewClient(ctx)5 if err != nil {6 log.Fatal(err)7 }8 defer client.Close()9 bucket := client.Bucket("bucket-name")10 bh := client.Bucket("bucket-name")11 wc := bucket.Object("object-name").NewWriter(ctx)12 rc, err := bucket.Object("object-name").NewReader(ctx)13 if err != nil {
Publish
Using AI Code Generation
1import (2func main() {3 ctx := context.Background()4 client, err := storage.NewClient(ctx)5 if err != nil {6 log.Fatal(err)7 }8 bucket := client.Bucket("my-bucket")9 wc := bucket.Object("foo").NewWriter(ctx)10 if _, err := wc.Write([]byte("bar")); err != nil {
Publish
Using AI Code Generation
1func main() {2 ctx := context.Background()3 client, err := storage.NewClient(ctx)4 if err != nil {5 log.Fatal(err)6 }7 defer client.Close()8 bucket := client.Bucket(bucketName)9 if err := bucket.Create(ctx, "project-id", nil); err != nil {10 log.Fatal(err)11 }12 fmt.Printf("Bucket %v created.\n", bucketName)13}14func main() {15 ctx := context.Background()16 client, err := storage.NewClient(ctx)17 if err != nil {18 log.Fatal(err)19 }20 defer client.Close()21 bucket := client.Bucket(bucketName)22 if err := bucket.Create(ctx, "project-id", nil); err != nil {23 log.Fatal(err)24 }25 wc := bucket.Object(objectName).NewWriter(ctx)26 if _, err := wc.Write([]byte("Hello World!")); err != nil {27 log.Fatal(err)28 }29 if err := wc.Close(); err != nil {30 log.Fatal(err)31 }32 fmt.Printf("Object %v created.\n", objectName)33}34func main() {35 ctx := context.Background()36 client, err := storage.NewClient(ctx)37 if err != nil {38 log.Fatal(err)39 }40 defer client.Close()41 bucket := client.Bucket(bucketName)42 if err := bucket.Create(ctx, "project-id", nil); err != nil {43 log.Fatal(err)44 }45 wc := bucket.Object(objectName).NewWriter(ctx
Publish
Using AI Code Generation
1import (2func main() {3 file, err := os.Open("2.go")4 if err != nil {5 log.Fatal(err)6 }7 client, err := storage.NewClient()8 if err != nil {9 log.Fatal(err)10 }11 bucket := client.Bucket("test-bucket-1")12 object := bucket.Object("2.go")13 w := object.NewWriter()14 if _, err := io.Copy(w, file); err != nil {15 log.Fatal(err)16 }17 if err := w.Close(); err != nil {18 log.Fatal(err)19 }20 fmt.Println("Done")21}
Publish
Using AI Code Generation
1import (2func main() {3 ctx := appengine.NewContext(r)4 bucket := storage.NewBucket(ctx, "my-bucket")5 obj := bucket.Object("my-object")6 w := obj.NewWriter(ctx)7 fmt.Fprint(w, "Hello, world!")8 if err := w.Close(); err != nil {9 log.Errorf(ctx, "Failed to write object: %v", err)10 }11}12import (13func main() {14 ctx := appengine.NewContext(r)15 bucket := storage.NewBucket(ctx, "my-bucket")16 obj := bucket.Object("my-object")17 r, err := obj.NewReader(ctx)18 if err != nil {19 log.Errorf(ctx, "Failed to open object: %v", err)20 }21 b, err := ioutil.ReadAll(r)22 if err != nil {23 log.Errorf(ctx, "Failed to read object: %v", err)24 }25 if err := r.Close(); err != nil {26 log.Errorf(ctx, "Failed to close object: %v", err)27 }28}29import (30func main() {31 ctx := appengine.NewContext(r)32 bucket := storage.NewBucket(ctx, "my-bucket")33 obj := bucket.Object("my-object")34 if err := obj.Delete(ctx); err != nil {35 log.Errorf(ctx, "Failed to delete object: %v", err)36 }37}38import (39func main() {40 ctx := appengine.NewContext(r)41 bucket := storage.NewBucket(ctx, "my-bucket")42 q := &storage.Query{Prefix: "my-object"}43 objs, err := bucket.List(ctx, q)
Publish
Using AI Code Generation
1import (2func main() {3 gcs := gcs{}4 gcs.Publish()5}6import (7func main() {8 gcs := gcs{}9 gcs.NewStorageService()10}11import (12func main() {13 gcs := gcs{}14 gcs.NewService()15}16import (17func main() {18 gcs := gcs{}19 gcs.NewClient()20}21import (22func main() {23 gcs := gcs{}24 gcs.NewTransport()25}26import (27func main() {28 gcs := gcs{}
Publish
Using AI Code Generation
1import (2func main() {3 gcs := storage.NewGCS()4 gcs.Publish("bucket", "object")5}6import (7func main() {8 gcs := storage.NewGCS()9 gcs.Subscribe("bucket", "object")10}11import (12func main() {13 gcs := storage.NewGCS()14 gcs.Unsubscribe("bucket", "object")15}16import (17func main() {18 gcs := storage.NewGCS()19 gcs.Delete("bucket", "object")20}21import (22func main() {23 gcs := storage.NewGCS()24 gcs.Get("bucket", "object")25}26import (27func main() {28 gcs := storage.NewGCS()29 gcs.GetMetadata("bucket", "object")30}31import (32func main() {33 gcs := storage.NewGCS()34 gcs.List("bucket", "object")35}36import (37func main() {
Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 minutes of automation testing FREE!