Best Gauge code snippet using parser.addPrefix
parser.go
Source:parser.go
...70 errorwords.Jerror()71 p := &Parser{l: l, errors: []Err{}}72 p.prefixParseFns = make(map[token.TokenType]prefixParseFn)73 p.infixParseFns = make(map[token.TokenType]infixParseFn)74 p.addPrefix(token.IF, p.parseIf)75 p.addPrefix(token.FUNCTION, p.parseFunction)76 //values77 p.addPrefix(token.IDENT, p.parseIdent)78 p.addPrefix(token.INT, p.parseInt)79 p.addPrefix(token.FLOAT, p.parseFloat)80 p.addPrefix(token.TRUE, p.parseBool)81 p.addPrefix(token.FALSE, p.parseBool)82 p.addPrefix(token.STRING, p.parseString)83 //84 p.addPrefix(token.LPAREN, p.parsegroup)85 p.addPrefix(token.LBRACKET, p.parseArray)86 //prefix87 p.addPrefix(token.MINUS, p.parsePrefix)88 p.addPrefix(token.EXCLA, p.parsePrefix)89 //infix90 p.addInfix(token.Equal, p.parseInfix)91 p.addInfix(token.Nequal, p.parseInfix)92 p.addInfix(token.AND, p.parseInfix)93 p.addInfix(token.OR, p.parseInfix)94 p.addInfix(token.RT, p.parseInfix)95 p.addInfix(token.LT, p.parseInfix)96 p.addInfix(token.ERT, p.parseInfix)97 p.addInfix(token.ELT, p.parseInfix)98 p.addInfix(token.PLUS, p.parseInfix)99 p.addInfix(token.MINUS, p.parseInfix)100 p.addInfix(token.ASTERISK, p.parseInfix)101 p.addInfix(token.SLASH, p.parseInfix)102 p.addInfix(token.PERCENT, p.parseInfix)103 //104 p.addInfix(token.LPAREN, p.parseCall)105 p.addInfix(token.LBRACKET, p.parseIndex)106 //107 p.nextToken()108 p.nextToken()109 return p110}111//nextToken Get next token and input to Parser112func (p *Parser) nextToken() {113 p.nowToken = p.readToken114 p.readToken = p.l.NextToken()115}116//Parse Main parse program117func (p *Parser) Parse() *ast.Root {118 root := &ast.Root{Statements: []ast.Statement{}}119 for p.nowToken.Type != token.EOF {120 if stmt := p.parseStmt(); stmt != nil {121 root.Statements = append(root.Statements, stmt)122 }123 p.nextToken()124 }125 return root126}127//parseStmt Do parsing (choose correct parsefunc)128func (p *Parser) parseStmt() ast.Statement {129 switch p.nowToken.Type {130 case token.MAKE:131 return p.parseMakeStmt()132 
case token.RETURN:133 return p.parseReturnStmt()134 case token.LOOP:135 return p.parseLoop()136 case token.STOP:137 return p.parseStopStmt()138 default:139 return p.parseExprStmt()140 }141}142//parseMakeStmt return parsed statement and check statement143func (p *Parser) parseMakeStmt() *ast.Make {144 makestmt := &ast.Make{Token: p.nowToken}145 //expect tokenType is IDENT146 if !p.expect(token.IDENT) {147 return nil148 }149 makestmt.Name = &ast.Identifier{Token: p.nowToken, Value: p.nowToken.Literal}150 if !p.expect(token.ASSIGN) {151 return nil152 }153 p.nextToken()154 makestmt.Value = p.parseExpression(LOWEST)155 if p.readToken.Type == token.SEMICOLON {156 p.nextToken()157 }158 return makestmt159}160//parseRetutnStmt return parsed statement and check statement161func (p *Parser) parseReturnStmt() *ast.Return {162 returnstmt := &ast.Return{Token: p.nowToken}163 p.nextToken()164 returnstmt.Value = p.parseExpression(LOWEST)165 if p.readToken.Type == token.SEMICOLON {166 p.nextToken()167 }168 return returnstmt169}170//parseStopStmt return parsed statement and check statement171func (p *Parser) parseStopStmt() *ast.Stop {172 stopstmt := &ast.Stop{Token: p.nowToken}173 if p.readToken.Type == token.SEMICOLON {174 p.nextToken()175 }176 return stopstmt177}178//parse expressionstatement179func (p *Parser) parseExprStmt() *ast.ExpressionStatement {180 stmt := &ast.ExpressionStatement{Token: p.nowToken}181 stmt.Expression = p.parseExpression(LOWEST)182 if p.readToken.Type == token.SEMICOLON {183 p.nextToken()184 }185 return stmt186}187//parse expression188func (p *Parser) parseExpression(level int) ast.Expression {189 // search prefix parse functions190 prefn := p.prefixParseFns[p.nowToken.Type]191 if prefn == nil {192 if p.nowToken.Type == token.ASSIGN {193 p.setError(122, p.nowToken.Line, p.nowToken.Literal)194 return nil195 }196 p.setError(121, p.nowToken.Line, p.nowToken.Literal)197 return nil198 }199 //call prefix parse function and put it into left200 left := prefn()201 
for level < p.nextTokenPriority() {202 infn := p.infixParseFns[p.readToken.Type]203 if infn == nil {204 return left205 }206 p.nextToken()207 left = infn(left)208 }209 return left210}211//parseIfExpression return parsed expression and check exoression212func (p *Parser) parseIf() ast.Expression {213 ifexp := &ast.If{Token: p.nowToken}214 p.nextToken()215 ifexp.Condition = p.parseExpression(LOWEST)216 if !p.expect(token.LBRACE) {217 return nil218 }219 ifexp.Consequence = p.parseBlockstmt()220 if p.nextTokenType(token.ELSE) {221 p.nextToken()222 if p.nextTokenType(token.IF) {223 ifexp.Alternative = p.parseBlockstmt()224 return ifexp225 }226 if !p.expect(token.LBRACE) {227 return nil228 }229 ifexp.Alternative = p.parseBlockstmt()230 }231 return ifexp232}233//parseLoop Statement234func (p *Parser) parseLoop() ast.Statement {235 lpexp := &ast.Loop{Token: p.nowToken}236 if p.nextTokenType(token.LBRACE) {237 truetoken := token.Token{Type: token.TRUE, Literal: "true", Line: p.nowToken.Line}238 lpexp.Condition = &ast.Bool{Token: truetoken, Value: true}239 } else {240 p.nextToken()241 lpexp.Condition = p.parseExpression(LOWEST)242 }243 if !p.expect(token.LBRACE) {244 return nil245 }246 lpexp.Process = p.parseBlockstmt()247 if p.readToken.Type == token.SEMICOLON {248 p.nextToken()249 }250 return lpexp251}252//parseFunction return parsed expression and check expression253func (p *Parser) parseFunction() ast.Expression {254 fnexp := &ast.Function{Token: p.nowToken}255 if p.nextTokenType(token.IDENT) {256 p.nextToken()257 fnexp.Name = &ast.Identifier{Token: p.nowToken, Value: p.nowToken.Literal}258 }259 if !p.expect(token.LPAREN) {260 return nil261 }262 fnexp.Parameters = p.parseFnParams()263 if !p.expect(token.LBRACE) {264 return nil265 }266 fnexp.Process = p.parseBlockstmt()267 return fnexp268}269//parse Function Parameters270func (p *Parser) parseFnParams() []*ast.Identifier {271 idents := []*ast.Identifier{}272 if p.nextTokenType(token.RPAREN) {273 p.nextToken()274 return 
idents275 }276 p.nextToken()277 ident := &ast.Identifier{Token: p.nowToken, Value: p.nowToken.Literal}278 idents = append(idents, ident)279 for p.nextTokenType(token.COMMA) {280 p.nextToken()281 p.nextToken()282 ident := &ast.Identifier{Token: p.nowToken, Value: p.nowToken.Literal}283 idents = append(idents, ident)284 }285 if !p.expect(token.RPAREN) {286 return nil287 }288 return idents289}290//parseCall return parsed expression and check expression291func (p *Parser) parseCall(function ast.Expression) ast.Expression {292 callexp := &ast.Call{Token: p.nowToken, Function: function}293 callexp.Arguments = p.parseList(token.RPAREN)294 return callexp295}296//parseArray return parsed expression and check expression297func (p *Parser) parseArray() ast.Expression {298 ar := &ast.Array{Token: p.nowToken}299 ar.Elements = p.parseList(token.RBRACKET)300 return ar301}302//parse List(Argumetns,Elements) to []ast.Expression303func (p *Parser) parseList(end token.TokenType) []ast.Expression {304 list := []ast.Expression{}305 if p.nextTokenType(end) {306 p.nextToken()307 return list308 }309 p.nextToken()310 list = append(list, p.parseExpression(LOWEST))311 for p.nextTokenType(token.COMMA) {312 p.nextToken()313 p.nextToken()314 list = append(list, p.parseExpression(LOWEST))315 }316 if !p.expect(end) {317 return nil318 }319 return list320}321//parseIndex return parsed expression and check expression322func (p *Parser) parseIndex(left ast.Expression) ast.Expression {323 ix := &ast.Index{Token: p.nowToken, Left: left}324 p.nextToken()325 ix.Index = p.parseExpression(LOWEST)326 if !p.expect(token.RBRACKET) {327 return nil328 }329 return ix330}331//parse Block Statements332func (p *Parser) parseBlockstmt() *ast.BlockStmt {333 var bs *ast.BlockStmt334 bs = &ast.BlockStmt{Token: p.nowToken, Statements: []ast.Statement{}}335 p.nextToken()336 for !p.nowTokenType(token.RBRACE) && !p.nowTokenType(token.EOF) {337 stmt := p.parseStmt()338 if stmt != nil {339 bs.Statements = 
append(bs.Statements, stmt)340 }341 p.nextToken()342 }343 return bs344}345//parse Identifier346func (p *Parser) parseIdent() ast.Expression {347 if strings.Index(p.nowToken.Literal, "ã") != -1 {348 p.setError(130, p.nowToken.Line)349 }350 if p.nextTokenType(token.ASSIGN) {351 tok := p.nowToken352 name := &ast.Identifier{Token: tok, Value: p.nowToken.Literal}353 p.nextToken()354 p.nextToken()355 value := p.parseExpression(LOWEST)356 if p.readToken.Type == token.SEMICOLON {357 p.nextToken()358 }359 return &ast.Assign{Token: tok, Name: name, Value: value}360 }361 return &ast.Identifier{Token: p.nowToken, Value: p.nowToken.Literal}362}363//parse Intenger364func (p *Parser) parseInt() ast.Expression {365 inted, err := strconv.ParseInt(p.nowToken.Literal, 10, 0)366 if err != nil {367 p.setError(111, p.nowToken.Line, p.nowToken.Literal)368 return nil369 }370 return &ast.Int{Token: p.nowToken, Value: inted}371}372//parse Intenger373func (p *Parser) parseFloat() ast.Expression {374 floated, err := strconv.ParseFloat(p.nowToken.Literal, 64)375 if err != nil {376 p.setError(112, p.nowToken.Line, p.nowToken.Literal)377 return nil378 }379 return &ast.Float{Token: p.nowToken, Value: floated}380}381//parse Boolean382func (p *Parser) parseBool() ast.Expression {383 return &ast.Bool{Token: p.nowToken, Value: p.nowTokenType(token.TRUE)}384}385//parse String Literal386func (p *Parser) parseString() ast.Expression {387 return &ast.String{Token: p.nowToken, Value: p.nowToken.Literal}388}389//parse Grouped Expression390func (p *Parser) parsegroup() ast.Expression {391 p.nextToken()392 ex := p.parseExpression(LOWEST)393 if !p.expect(token.RPAREN) {394 return nil395 }396 return ex397}398//parse Prefix Expression399func (p *Parser) parsePrefix() ast.Expression {400 ex := &ast.Prefix{Token: p.nowToken, Operator: p.nowToken.Literal}401 p.nextToken()402 ex.Value = p.parseExpression(PREFIX)403 return ex404}405//parse Infix Expression406func (p *Parser) parseInfix(left ast.Expression) 
ast.Expression {407 ex := &ast.Infix{408 Token: p.nowToken,409 Operator: p.nowToken.Literal,410 Left: left,411 }412 level := p.nowTokenPriority()413 p.nextToken()414 ex.Right = p.parseExpression(level)415 return ex416}417//expect next token and error check418func (p *Parser) expect(t token.TokenType) bool {419 if p.readToken.Type == t {420 p.nextToken()421 return true422 }423 p.setError(101, p.readToken.Line, t, p.readToken.Literal)424 return false425}426//setError set error427func (p *Parser) setError(code int, params ...interface{}) {428 message := fmt.Sprintf(errorwords.Err[code], params...)429 line := params[0].(int)430 p.errors = append(p.errors, Err{Message: message, Line: line})431}432//getError get error433func (p *Parser) GetError() []Err {434 return p.errors435}436//add fixfunction to functionsmap437func (p *Parser) addPrefix(tt token.TokenType, fn prefixParseFn) {438 p.prefixParseFns[tt] = fn439}440func (p *Parser) addInfix(tt token.TokenType, fn infixParseFn) {441 p.infixParseFns[tt] = fn442}443func (p *Parser) nowTokenType(t token.TokenType) bool {444 if t == p.nowToken.Type {445 return true446 }447 return false448}449func (p *Parser) nextTokenType(t token.TokenType) bool {450 if t == p.readToken.Type {451 return true...
genbank.go
Source:genbank.go
1package seqio2import (3 "fmt"4 "io"5 "strconv"6 "strings"7 "github.com/go-ascii/ascii"8 "github.com/go-gts/gts"9 "github.com/go-pars/pars"10 "github.com/go-wrap/wrap"11)12const defaultGenBankIndent = " "13// FieldFormatter represents a function for formatting a field.14type FieldFormatter func(name, value string) string15// ExtraField represents an uncommon field of a genome flat-file.16type ExtraField struct {17 Name string18 Value string19 Format func(name, value string) string20}21// String satisfies the fmt.Stringer interface.22func (field ExtraField) String() string {23 return field.Format(field.Name, field.Value)24}25func genbankFieldFormatter(name, value string) string {26 value = AddPrefix(value, defaultGenBankIndent)27 return fmt.Sprintf("%-12s%s", name, value)28}29// GenBankExtraField creates a new extra field with a default formatter.30func GenBankExtraField(name, value string) ExtraField {31 return ExtraField{name, value, genbankFieldFormatter}32}33// GenBankFields represents the fields of a GenBank record other than the34// features and sequence.35type GenBankFields struct {36 LocusName string37 Molecule gts.Molecule38 Topology gts.Topology39 Division string40 Date Date41 Definition string42 Accession string43 Version string44 DBLink Dictionary45 Keywords []string46 Source Organism47 References []Reference48 Comments []string49 Extra []ExtraField50 Contig Contig51 Region gts.Region // Appears in sliced files.52}53// Slice returns a metadata sliced with the given region.54func (gbf GenBankFields) Slice(start, end int) interface{} {55 gbf.Region = gts.Segment{start, end}56 prefix := gbf.Molecule.Counter()57 parser := parseReferenceInfo(prefix)58 tryParse := func(info string) ([]gts.Ranged, bool) {59 result, err := parser.Parse(pars.FromString(info))60 if err != nil {61 return nil, false62 }63 return result.Value.([]gts.Ranged), true64 }65 refs := []Reference{}66 for _, ref := range gbf.References {67 info := ref.Info68 locs, ok := tryParse(info)69 
switch {70 case ok:71 olap := []gts.Ranged{}72 for _, loc := range locs {73 if gts.LocationOverlap(loc, start, end) {74 olap = append(olap, loc)75 }76 }77 if len(olap) > 0 {78 ss := make([]string, len(olap))79 for i, loc := range olap {80 head, tail := loc.Start, loc.End81 head = gts.Max(0, head-start)82 tail = gts.Min(end-start, tail-start)83 ss[i] = fmt.Sprintf("%d to %d", head+1, tail)84 }85 ref.Info = fmt.Sprintf("(%s %s)", prefix, strings.Join(ss, "; "))86 refs = append(refs, ref)87 }88 default:89 refs = append(refs, ref)90 }91 }92 for i := range refs {93 refs[i].Number = i + 194 }95 gbf.References = refs96 return gbf97}98// ID returns the ID of the sequence.99func (gbf GenBankFields) ID() string {100 if gbf.Version != "" {101 return gbf.Version102 }103 if gbf.Accession != "" {104 return gbf.Accession105 }106 return gbf.LocusName107}108// String satisifes the fmt.Stringer interface.109func (gbf GenBankFields) String() string {110 if seg, ok := gbf.Region.(gts.Segment); ok {111 head, tail := gts.Unpack(seg)112 return fmt.Sprintf("%s:%d-%d %s", gbf.Version, head+1, tail, gbf.Definition)113 }114 return fmt.Sprintf("%s %s", gbf.Version, gbf.Definition)115}116// GenBank represents a GenBank sequence record.117type GenBank struct {118 Fields GenBankFields119 Table gts.FeatureSlice120 Origin *Origin121}122// Info returns the metadata of the sequence.123func (gb GenBank) Info() interface{} {124 return gb.Fields125}126// Features returns the feature table of the sequence.127func (gb GenBank) Features() gts.FeatureSlice {128 return gb.Table129}130// Len returns the length of the sequence.131func (gb GenBank) Len() int {132 return gb.Origin.Len()133}134// Bytes returns the byte representation of the sequence.135func (gb GenBank) Bytes() []byte {136 return gb.Origin.Bytes()137}138// WithInfo creates a shallow copy of the given Sequence object and swaps the139// metadata with the given value.140func (gb GenBank) WithInfo(info interface{}) gts.Sequence {141 switch v := 
info.(type) {142 case GenBankFields:143 return GenBank{v, gb.Table, gb.Origin}144 default:145 return gts.New(v, gb.Features(), gb.Bytes())146 }147}148// WithFeatures creates a shallow copy of the given Sequence object and swaps149// the feature table with the given features.150func (gb GenBank) WithFeatures(ff []gts.Feature) gts.Sequence {151 return GenBank{gb.Fields, ff, gb.Origin}152}153// WithBytes creates a shallow copy of the given Sequence object and swaps the154// byte representation with the given byte slice.155func (gb GenBank) WithBytes(p []byte) gts.Sequence {156 return GenBank{gb.Fields, gb.Table, NewOrigin(p)}157}158// WithTopology creates a shallow copy of the given Sequence object and swaps159// the topology value with the given value.160func (gb GenBank) WithTopology(t gts.Topology) gts.Sequence {161 info := gb.Fields162 info.Topology = t163 return gb.WithInfo(info)164}165// String satisifes the fmt.Stringer interface.166func (gb GenBank) String() string {167 b := strings.Builder{}168 indent := defaultGenBankIndent169 length := gb.Origin.Len()170 if length == 0 {171 length = gb.Fields.Contig.Region.Len()172 }173 date := strings.ToUpper(gb.Fields.Date.ToTime().Format("02-Jan-2006"))174 locus := fmt.Sprintf(175 "%-12s%-17s %10d bp %6s %-9s%s %s", "LOCUS", gb.Fields.LocusName,176 length, gb.Fields.Molecule, gb.Fields.Topology, gb.Fields.Division, date,177 )178 b.WriteString(locus + "\n")179 definition := AddPrefix(gb.Fields.Definition, indent)180 b.WriteString("DEFINITION " + definition + ".\n")181 b.WriteString("ACCESSION " + gb.Fields.Accession)182 if seg, ok := gb.Fields.Region.(gts.Segment); ok {183 loc := gts.Range(gts.Unpack(seg))184 b.WriteString(fmt.Sprintf(" REGION: %s", loc))185 }186 b.WriteByte('\n')187 b.WriteString("VERSION " + gb.Fields.Version + "\n")188 for i, pair := range gb.Fields.DBLink {189 switch i {190 case 0:191 b.WriteString("DBLINK ")192 default:193 b.WriteString(indent)194 }195 b.WriteString(fmt.Sprintf("%s: %s\n", pair.Key, 
pair.Value))196 }197 keywords := wrap.Space(strings.Join(gb.Fields.Keywords, "; ")+".", 67)198 keywords = AddPrefix(keywords, indent)199 b.WriteString("KEYWORDS " + keywords + "\n")200 source := wrap.Space(gb.Fields.Source.Species, 67)201 source = AddPrefix(source, indent)202 b.WriteString("SOURCE " + source + "\n")203 organism := wrap.Space(gb.Fields.Source.Name, 67)204 organism = AddPrefix(organism, indent)205 b.WriteString(" ORGANISM " + organism + "\n")206 taxon := wrap.Space(strings.Join(gb.Fields.Source.Taxon, "; ")+".", 67)207 taxon = AddPrefix(taxon, indent)208 b.WriteString(indent + taxon + "\n")209 for _, ref := range gb.Fields.References {210 b.WriteString(fmt.Sprintf("REFERENCE %d", ref.Number))211 if ref.Info != "" {212 pad := strings.Repeat(" ", 3-len(strconv.Itoa(ref.Number)))213 b.WriteString(pad + ref.Info)214 }215 b.WriteByte('\n')216 if ref.Authors != "" {217 b.WriteString(" AUTHORS " + AddPrefix(ref.Authors, indent) + "\n")218 }219 if ref.Group != "" {220 b.WriteString(" CONSRTM " + AddPrefix(ref.Group, indent) + "\n")221 }222 if ref.Title != "" {223 b.WriteString(" TITLE " + AddPrefix(ref.Title, indent) + "\n")224 }225 if ref.Journal != "" {226 b.WriteString(" JOURNAL " + AddPrefix(ref.Journal, indent) + "\n")227 }228 if ref.Xref != nil {229 if v, ok := ref.Xref["PUBMED"]; ok {230 b.WriteString(" PUBMED " + v + "\n")231 }232 }233 if ref.Comment != "" {234 b.WriteString(" REMARK " + AddPrefix(ref.Comment, indent) + "\n")235 }236 }237 for _, comment := range gb.Fields.Comments {238 b.WriteString("COMMENT " + AddPrefix(comment, indent) + "\n")239 }240 for _, extra := range gb.Fields.Extra {241 b.WriteString(extra.String() + "\n")242 }243 b.WriteString("FEATURES Location/Qualifiers\n")244 fmtr := INSDCFormatter{gb.Table, " ", 21}245 fmtr.WriteTo(&b)246 b.WriteByte('\n')247 if gb.Fields.Contig.String() != "" {248 b.WriteString(fmt.Sprintf("CONTIG %s\n", gb.Fields.Contig))249 }250 if gb.Origin.Len() > 0 {251 b.WriteString("ORIGIN \n")252 
b.WriteString(gb.Origin.String())253 }254 b.WriteString("//\n")255 return b.String()256}257// WriteTo satisfies the io.WriterTo interface.258func (gb GenBank) WriteTo(w io.Writer) (int64, error) {259 n, err := io.WriteString(w, gb.String())260 return int64(n), err261}262// GenBankWriter writes a gts.Sequence to an io.Writer in GenBank format.263type GenBankWriter struct {264 w io.Writer265}266// WriteSeq satisfies the seqio.SeqWriter interface.267func (w GenBankWriter) WriteSeq(seq gts.Sequence) (int, error) {268 switch v := seq.(type) {269 case GenBank:270 n, err := v.WriteTo(w.w)271 return int(n), err272 case *GenBank:273 return w.WriteSeq(*v)274 default:275 switch info := v.Info().(type) {276 case GenBankFields:277 gb := GenBank{info, v.Features(), NewOrigin(v.Bytes())}278 return w.WriteSeq(gb)279 default:280 return 0, fmt.Errorf("gts does not know how to format a sequence with metadata of type `%T` as GenBank", info)281 }282 }283}284var genbankLocusParser = pars.Seq(285 "LOCUS", pars.Spaces,286 pars.Word(ascii.Not(ascii.IsSpace)), pars.Spaces,287 pars.Int, pars.Any(" bp", " aa"), pars.Spaces,288 pars.Word(ascii.Not(ascii.IsSpace)), pars.Spaces,289 pars.Word(ascii.Not(ascii.IsSpace)), pars.Spaces,290 pars.Maybe(pars.Count(pars.Filter(ascii.IsUpper), 3).Map(pars.Cat)),291 pars.Spaces,292 pars.AsParser(pars.Line).Map(func(result *pars.Result) (err error) {293 s := string(result.Token)294 date, err := AsDate(s)295 result.SetValue(date)296 return err297 }),298).Children(1, 2, 4, 7, 9, 11, 13)299func tryAllParsers(pp []pars.Parser) pars.Parser {300 return func(state *pars.State, result *pars.Result) (err error) {301 for _, p := range pp {302 state.Push()303 err = p(state, result)304 if err == nil {305 state.Drop()306 return nil307 }308 if !state.Pushed() {309 return err310 }311 state.Pop()312 }313 return err314 }315}316// GenBankParser attempts to parse a single GenBank record.317func GenBankParser(state *pars.State, result *pars.Result) error {318 if err := 
genbankLocusParser(state, result); err != nil {319 return err320 }321 state.Clear()322 depth := len(result.Children[0].Token) + 5323 locus := string(result.Children[1].Token)324 length := result.Children[2].Value.(int)325 molecule, err := gts.AsMolecule(string(result.Children[3].Token))326 if err != nil {327 return pars.NewError(err.Error(), state.Position())328 }329 topology, err := gts.AsTopology(string(result.Children[4].Token))330 if err != nil {331 return pars.NewError(err.Error(), state.Position())332 }333 division := string(result.Children[5].Token)334 date := result.Children[6].Value.(Date)335 gb := &GenBank{Fields: GenBankFields{336 LocusName: locus,337 Molecule: molecule,338 Topology: topology,339 Division: division,340 Date: date,341 Region: nil,342 }, Origin: NewOrigin(nil)}343 genbankOriginParser := makeGenbankOriginParser(length)344 generators := []genbankSubparser{345 genbankDefinitionParser,346 genbankAccessionParser,347 genbankVersionParser,348 genbankDBLinkParser,349 genbankKeywordsParser,350 genbankSourceParser,351 genbankReferenceParser,352 genbankCommentParser,353 genbankFeatureParser,354 genbankContigParser,355 genbankOriginParser,356 genbankExtraFieldParser,357 }358 subparsers := make([]pars.Parser, len(generators))359 for i, generate := range generators {360 subparsers[i] = generate(gb, depth)361 }362 parser := tryAllParsers(subparsers)363 end := pars.Seq("//", pars.EOL)364 for end(state, result) != nil {365 if err := parser(state, result); err != nil {366 if dig(err) != errGenBankExtra {367 return err368 }369 pars.Line(state, result)370 if pars.End(state, result) == nil {371 return errGenBankField372 }373 }374 }375 result.SetValue(*gb)376 return nil377}...
parse.go
Source:parse.go
1package config2import (3 "io/ioutil"4 "net/url"5 "path/filepath"6 "github.com/DaRealFreak/epub-scraper/pkg/raven"7 "gopkg.in/yaml.v2"8)9// Parser is a struct solely to prevent expose functions without setting up first10type Parser struct{}11// NewParser returns a pointer to an initialized parser struct12func NewParser() *Parser {13 return &Parser{}14}15// ReadConfigurationFile tries to read the passed configuration file and parse it into a NovelConfig struct16func (p *Parser) ReadConfigurationFile(fileName string) (novelConfig *NovelConfig, err error) {17 content, err := ioutil.ReadFile(filepath.Clean(fileName))18 if err != nil {19 return nil, err20 }21 err = yaml.Unmarshal(content, &novelConfig)22 if err != nil {23 return nil, err24 }25 // set base directory for includes and the like26 baseDirectory := filepath.Dir(fileName)27 novelConfig.BaseDirectory, err = filepath.Abs(baseDirectory)28 p.mergeSourceConfigSiteConfig(novelConfig)29 return novelConfig, err30}31// mergeSourceConfigSiteConfig merges the chapter configuration with the site configuration32// or sets the default values in case neither the chapter nor the site configuration has a value set33func (p *Parser) mergeSourceConfigSiteConfig(novelConfig *NovelConfig) {34 for _, source := range novelConfig.Chapters {35 if source.Toc != nil {36 tocURL, err := url.Parse(source.Toc.URL)37 raven.CheckError(err)38 site := novelConfig.GetSiteConfigFromURL(tocURL)39 p.updatePagination(&source.Toc.Pagination, &site.Pagination)40 p.updateTitleContent(&source.Toc.TitleContent, &site.TitleContent)41 p.updateChapterContent(&source.Toc.ChapterContent, &site.ChapterContent)42 }43 if source.Chapter != nil {44 tocURL, err := url.Parse(source.Chapter.URL)45 raven.CheckError(err)46 site := novelConfig.GetSiteConfigFromURL(tocURL)47 p.updateTitleContent(&source.Chapter.TitleContent, &site.TitleContent)48 p.updateChapterContent(&source.Chapter.ChapterContent, &site.ChapterContent)49 }50 }51}52// updatePagination updates 
specifically the Pagination struct of the chapter/site configuration53func (p *Parser) updatePagination(sourceConfig *Pagination, siteConfig *Pagination) {54 if sourceConfig.ReversePosts == nil {55 if siteConfig.ReversePosts == nil {56 var siteConfigDefault bool57 siteConfig.ReversePosts = &siteConfigDefault58 }59 sourceConfig.ReversePosts = siteConfig.ReversePosts60 }61 if sourceConfig.NextPageSelector == nil {62 if siteConfig.NextPageSelector == nil {63 var siteConfigDefault string64 siteConfig.NextPageSelector = &siteConfigDefault65 }66 sourceConfig.NextPageSelector = siteConfig.NextPageSelector67 }68}69// updateChapterContent updates specifically the ChapterContent struct of the chapter/site configuration70func (p *Parser) updateChapterContent(sourceConfig *ChapterContent, siteConfig *ChapterContent) {71 if sourceConfig.ContentSelector == nil {72 if siteConfig.ContentSelector == nil {73 var siteConfigDefault string74 siteConfig.ContentSelector = &siteConfigDefault75 }76 sourceConfig.ContentSelector = siteConfig.ContentSelector77 }78 if sourceConfig.CleanupOptions.PrefixSelectors == nil {79 if siteConfig.CleanupOptions.PrefixSelectors == nil {80 var siteConfigDefault []string81 siteConfig.CleanupOptions.PrefixSelectors = &siteConfigDefault82 }83 sourceConfig.CleanupOptions.PrefixSelectors = siteConfig.CleanupOptions.PrefixSelectors84 }85 if sourceConfig.CleanupOptions.SuffixSelectors == nil {86 if siteConfig.CleanupOptions.SuffixSelectors == nil {87 var siteConfigDefault []string88 siteConfig.CleanupOptions.SuffixSelectors = &siteConfigDefault89 }90 sourceConfig.CleanupOptions.SuffixSelectors = siteConfig.CleanupOptions.SuffixSelectors91 }92 if sourceConfig.CleanupOptions.StripRegex == "" && siteConfig.CleanupOptions.StripRegex != "" {93 sourceConfig.CleanupOptions.StripRegex = siteConfig.CleanupOptions.StripRegex94 }95 if sourceConfig.CleanupOptions.CleanupRegex == "" && siteConfig.CleanupOptions.CleanupRegex != "" {96 sourceConfig.CleanupOptions.CleanupRegex = 
siteConfig.CleanupOptions.CleanupRegex97 }98}99func (p *Parser) updateTitleContent(sourceConfig *TitleContent, siteConfig *TitleContent) {100 if sourceConfig.AddPrefix == nil {101 if siteConfig.AddPrefix == nil {102 var siteConfigDefault bool103 siteConfig.AddPrefix = &siteConfigDefault104 }105 sourceConfig.AddPrefix = siteConfig.AddPrefix106 }107 if sourceConfig.TitleSelector == nil {108 if siteConfig.TitleSelector == nil {109 var siteConfigDefault string110 siteConfig.TitleSelector = &siteConfigDefault111 }112 sourceConfig.TitleSelector = siteConfig.TitleSelector113 }114 if sourceConfig.CleanupOptions.PrefixSelectors == nil {115 if siteConfig.CleanupOptions.PrefixSelectors == nil {116 var siteConfigDefault []string117 siteConfig.CleanupOptions.PrefixSelectors = &siteConfigDefault118 }119 sourceConfig.CleanupOptions.PrefixSelectors = siteConfig.CleanupOptions.PrefixSelectors120 }121 if sourceConfig.CleanupOptions.SuffixSelectors == nil {122 if siteConfig.CleanupOptions.SuffixSelectors == nil {123 var siteConfigDefault []string124 siteConfig.CleanupOptions.SuffixSelectors = &siteConfigDefault125 }126 sourceConfig.CleanupOptions.SuffixSelectors = siteConfig.CleanupOptions.SuffixSelectors127 }128 if sourceConfig.CleanupOptions.StripRegex == "" && siteConfig.CleanupOptions.StripRegex != "" {129 sourceConfig.CleanupOptions.StripRegex = siteConfig.CleanupOptions.StripRegex130 }131 if sourceConfig.CleanupOptions.CleanupRegex == "" && siteConfig.CleanupOptions.CleanupRegex != "" {132 sourceConfig.CleanupOptions.CleanupRegex = siteConfig.CleanupOptions.CleanupRegex133 }134}...
addPrefix
Using AI Code Generation
1import (2type parser struct {3}4func (p *parser) addPrefix(s string) string {5}6func main() {7 p := parser{prefix: "prefix_"}8 files, err := filepath.Glob("*.txt")9 if err != nil {10 fmt.Println(err)11 os.Exit(1)12 }13 for _, file := range files {14 fmt.Println(p.addPrefix(file))15 }16}
addPrefix
Using AI Code Generation
1import "fmt"2func main() {3 p.addPrefix("!", p.parsePrefixExpression)4}5import "fmt"6func main() {7 p.addInfix("=", p.parseInfixExpression)8}9import "fmt"10func main() {11 p.addPrefix("!", p.parsePrefixExpression)12 p.addInfix("=", p.parseInfixExpression)13}14import "fmt"15func main() {16 p.addPrefix("!", p.parsePrefixExpression)17 p.addInfix("=", p.parseInfixExpression)18 p.addInfix("==", p.parseInfixExpression)19}20import "fmt"21func main() {22 p.addPrefix("!", p.parsePrefixExpression)23 p.addInfix("=", p.parseInfixExpression)24 p.addInfix("==", p.parseInfixExpression)25 p.addInfix("+", p.parseInfixExpression)26}27import "fmt"28func main() {29 p.addPrefix("!", p.parsePrefixExpression)30 p.addInfix("=", p.parseInfixExpression)31 p.addInfix("==", p.parseInfixExpression)32 p.addInfix("+", p.parseInfixExpression)33 p.addPrefix("-", p.parsePrefixExpression)34}35import "fmt"36func main() {37 p.addPrefix("!", p.parsePrefixExpression)38 p.addInfix("=", p.parseInfixExpression)39 p.addInfix("==", p.parseInfixExpression)40 p.addInfix("+", p.parseInfixExpression)41 p.addPrefix("-", p.parsePrefixExpression)42 p.addInfix("*", p.parseInfixExpression
addPrefix
Using AI Code Generation
1import (2func main() {3 fmt.Println("Enter a string")4 reader := bufio.NewReader(os.Stdin)5 s, _ = reader.ReadString('\n')6 s = strings.TrimSpace(s)7 fmt.Println("Enter a prefix")8 reader = bufio.NewReader(os.Stdin)9 prefix, _ := reader.ReadString('\n')10 prefix = strings.TrimSpace(prefix)11 parser := Parser{}12 fmt.Println(parser.addPrefix(s, prefix))13}14import (15type Parser struct {16}17func (p *Parser) addPrefix(s string, prefix string) string {18 return prefix + strings.ToUpper(s)19}20import (21func TestAddPrefix(t *testing.T) {22 parser := Parser{}23 result := parser.addPrefix("hello", "pre")24 if result != expected {25 t.Errorf("Expected %s, but got %s", expected, result)26 }27}28--- FAIL: TestAddPrefix (0.00s)
addPrefix
Using AI Code Generation
1import (2func main() {3 p.addPrefix("Mr.", "Dr.", "Mrs.")4 fmt.Println(p.prefix)5}6import (7func main() {8 p.addPrefix("Mr.", "Dr.", "Mrs.")9 fmt.Println(p.prefix)10}11import (12func main() {13 p.addPrefix("Mr.", "Dr.", "Mrs.")14 fmt.Println(p.prefix)15}16import (17func main() {18 p.addPrefix("Mr.", "Dr.", "Mrs.")19 fmt.Println(p.prefix)20}21import (22func main() {23 p.addPrefix("Mr.", "Dr.", "Mrs.")24 fmt.Println(p.prefix)25}26import (27func main() {28 p.addPrefix("Mr.", "Dr.", "Mrs.")29 fmt.Println(p.prefix)30}31import (32func main() {33 p.addPrefix("Mr.", "Dr.", "Mrs.")34 fmt.Println(p.prefix)35}36import (37func main() {38 p.addPrefix("Mr.", "Dr.", "Mrs.")39 fmt.Println(p.prefix)40}41import (42func main() {43 p.addPrefix("Mr.", "Dr.", "Mrs.")44 fmt.Println(p.prefix)45}
addPrefix
Using AI Code Generation
1import "fmt"2func main() {3 parser := Parser{}4 parser.AddPrefix("!", parser.parsePrefixExpression)5 parser.AddPrefix("-", parser.parsePrefixExpression)6}7import "fmt"8func main() {9 parser := Parser{}10 parser.AddInfix("+", parser.parseInfixExpression)11 parser.AddInfix("-", parser.parseInfixExpression)12}13import "fmt"14func main() {15 parser := Parser{}16 parser.AddInfix("+", parser.parseInfixExpression)17 parser.AddInfix("-", parser.parseInfixExpression)18}19import "fmt"20func main() {21 parser := Parser{}22 parser.AddInfix("+", parser.parseInfixExpression)23 parser.AddInfix("-", parser.parseInfixExpression)24}25import "fmt"26func main() {27 parser := Parser{}28 parser.AddInfix("+", parser.parseInfixExpression)29 parser.AddInfix("-", parser.parseInfixExpression)30}31import "fmt"32func main() {33 parser := Parser{}34 parser.AddInfix("+", parser.parseInfixExpression)35 parser.AddInfix("-", parser.parseInfixExpression)36}37import "fmt"38func main() {39 parser := Parser{}40 parser.AddInfix("+", parser.parseInfixExpression)41 parser.AddInfix("-", parser.parseInfixExpression)42}43import "fmt"44func main() {45 parser := Parser{}46 parser.AddInfix("+", parser.parseInfixExpression)47 parser.AddInfix("-", parser.parseInfixExpression)48}49import "fmt"50func main() {
addPrefix
Using AI Code Generation
1import (2func main() {3 p := parser.NewParser()4 p.AddPrefix("Hello")5 fmt.Println(p.Prefix)6}7import (8func main() {9 p := parser.NewParser()10 p.AddPrefix("Hello")11 fmt.Println(p.Prefix)12}13import (14func main() {15 p := parser.NewParser()16 p.AddPrefix("Hello")17 fmt.Println(p.Prefix)18}19import (20type Parser struct {21}22func NewParser() *Parser {23 return &Parser{}24}25func (p *Parser) AddPrefix(prefix string) {26}27func (p *Parser) PrintPrefix() {28 fmt.Println(p.Prefix)29}30require (
addPrefix
Using AI Code Generation
1import (2func main() {3 p := parser{"Hello"}4 fmt.Println(p.addPrefix("World"))5}6type parser struct {7}8func (p *parser) addPrefix(prefix string) string {9}
addPrefix
Using AI Code Generation
1import ("parser"; "fmt")2func main() {3 parser.addPrefix("go", "golang")4 fmt.Println(parser.Parse("go is a good language"))5}6import ("parser"; "fmt")7func main() {8 parser.addPrefix("go", "golang")9 fmt.Println(parser.Parse("go is a good language"))10}11import "strings"12var prefixMap = make(map[string]string)13func addPrefix(key, value string) {14}15func Parse(input string) string {16 for k, v := range prefixMap {17 input = strings.Replace(input, k, v, -1)18 }19}20import "strings"21var prefixMap = make(map[string]string)22func AddPrefix(key, value string) {23}24func Parse(input string) string {25 for k, v := range prefixMap {26 input = strings.Replace(input, k, v, -1)27 }28}29import "strings"30var prefixMap = make(map[string]string)31func AddPrefix(key, value string) {32}33func Parse(input string) string {34 for k, v := range prefixMap {35 input = strings.Replace(input, k, v, -1)36 }37}
Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.
You could also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 minutes of automation testing FREE!